diff --git a/py311/lib/python3.11/site-packages/Levenshtein/StringMatcher.py b/py311/lib/python3.11/site-packages/Levenshtein/StringMatcher.py
new file mode 100644
index 0000000000000000000000000000000000000000..269b98a694fc9224ba841493ac1469d485fd6555
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/Levenshtein/StringMatcher.py
@@ -0,0 +1,74 @@
+from __future__ import annotations
+
+from warnings import warn
+
+from Levenshtein import distance, editops, matching_blocks, opcodes, ratio
+
+
+class StringMatcher:
+    """A SequenceMatcher-like class built on top of Levenshtein"""
+
+ def _reset_cache(self):
+ self._ratio = self._distance = None
+ self._opcodes = self._editops = self._matching_blocks = None
+
+ def __init__(self, isjunk=None, seq1="", seq2="", autojunk=False):
+ if isjunk:
+ warn("isjunk NOT implemented, it will be ignored", stacklevel=1)
+ if autojunk:
+ warn("autojunk NOT implemented, it will be ignored", stacklevel=1)
+ self._str1, self._str2 = seq1, seq2
+ self._reset_cache()
+
+ def set_seqs(self, seq1, seq2):
+ self._str1, self._str2 = seq1, seq2
+ self._reset_cache()
+
+ def set_seq1(self, seq1):
+ self._str1 = seq1
+ self._reset_cache()
+
+ def set_seq2(self, seq2):
+ self._str2 = seq2
+ self._reset_cache()
+
+ def get_opcodes(self):
+ if not self._opcodes:
+ if self._editops:
+ self._opcodes = opcodes(self._editops, self._str1, self._str2)
+ else:
+ self._opcodes = opcodes(self._str1, self._str2)
+ return self._opcodes
+
+ def get_editops(self):
+ if not self._editops:
+ if self._opcodes:
+ self._editops = editops(self._opcodes, self._str1, self._str2)
+ else:
+ self._editops = editops(self._str1, self._str2)
+ return self._editops
+
+ def get_matching_blocks(self):
+ if not self._matching_blocks:
+ self._matching_blocks = matching_blocks(self.get_opcodes(), self._str1, self._str2)
+ return self._matching_blocks
+
+ def ratio(self):
+ if not self._ratio:
+ self._ratio = ratio(self._str1, self._str2)
+ return self._ratio
+
+ def quick_ratio(self):
+ # This is usually quick enough :o)
+ if not self._ratio:
+ self._ratio = ratio(self._str1, self._str2)
+ return self._ratio
+
+ def real_quick_ratio(self):
+ len1, len2 = len(self._str1), len(self._str2)
+ return 2.0 * min(len1, len2) / (len1 + len2)
+
+ def distance(self):
+ if not self._distance:
+ self._distance = distance(self._str1, self._str2)
+ return self._distance
diff --git a/py311/lib/python3.11/site-packages/Levenshtein/__init__.py b/py311/lib/python3.11/site-packages/Levenshtein/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..04a929e006f22f2ccdf5efc2833b53d0e5671631
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/Levenshtein/__init__.py
@@ -0,0 +1,550 @@
+"""
+A C extension module for fast computation of:
+- Levenshtein (edit) distance and edit sequence manipulation
+- string similarity
+- approximate median strings, and generally string averaging
+- string sequence and set similarity
+
+Levenshtein has some overlap with difflib (SequenceMatcher). It
+supports only strings, not arbitrary sequence types, but on the
+other hand it's much faster.
+
+It supports both normal and Unicode strings, but can't mix them; all
+arguments to a function (method) have to be of the same type (or its
+subclasses).
+"""
+
+from __future__ import annotations
+
+__author__: str = "Max Bachmann"
+__license__: str = "GPL"
+__version__: str = "0.27.3"
+
+import rapidfuzz.distance.Hamming as _Hamming
+import rapidfuzz.distance.Indel as _Indel
+import rapidfuzz.distance.Jaro as _Jaro
+import rapidfuzz.distance.JaroWinkler as _JaroWinkler
+import rapidfuzz.distance.Levenshtein as _Levenshtein
+from rapidfuzz.distance import (
+ Editops as _Editops,
+)
+from rapidfuzz.distance import (
+ Opcodes as _Opcodes,
+)
+
+from Levenshtein.levenshtein_cpp import (
+ median,
+ median_improve,
+ quickmedian,
+ seqratio,
+ setmedian,
+ setratio,
+)
+
+__all__ = [
+ "quickmedian",
+ "median",
+ "median_improve",
+ "setmedian",
+ "setratio",
+ "seqratio",
+ "distance",
+ "ratio",
+ "hamming",
+ "jaro",
+ "jaro_winkler",
+ "editops",
+ "opcodes",
+ "matching_blocks",
+ "apply_edit",
+ "subtract_edit",
+ "inverse",
+]
+
+
+def distance(s1, s2, *, weights=(1, 1, 1), processor=None, score_cutoff=None, score_hint=None):
+ """
+ Calculates the minimum number of insertions, deletions, and substitutions
+ required to change one sequence into the other according to Levenshtein with custom
+ costs for insertion, deletion and substitution
+
+ Parameters
+ ----------
+ s1 : Sequence[Hashable]
+ First string to compare.
+ s2 : Sequence[Hashable]
+ Second string to compare.
+ weights : Tuple[int, int, int] or None, optional
+ The weights for the three operations in the form
+ (insertion, deletion, substitution). Default is (1, 1, 1),
+ which gives all three operations a weight of 1.
+ processor: callable, optional
+ Optional callable that is used to preprocess the strings before
+ comparing them. Default is None, which deactivates this behaviour.
+ score_cutoff : int, optional
+ Maximum distance between s1 and s2, that is
+ considered as a result. If the distance is bigger than score_cutoff,
+ score_cutoff + 1 is returned instead. Default is None, which deactivates
+ this behaviour.
+ score_hint : int, optional
+ Expected distance between s1 and s2. This is used to select a
+ faster implementation. Default is None, which deactivates this behaviour.
+
+ Returns
+ -------
+ distance : int
+ distance between s1 and s2
+
+ Raises
+ ------
+ ValueError
+ If unsupported weights are provided a ValueError is thrown
+
+ Examples
+ --------
+ Find the Levenshtein distance between two strings:
+
+ >>> from Levenshtein import distance
+ >>> distance("lewenstein", "levenshtein")
+ 2
+
+ Setting a maximum distance allows the implementation to select
+ a more efficient implementation:
+
+ >>> distance("lewenstein", "levenshtein", score_cutoff=1)
+ 2
+
+ It is possible to select different weights by passing a `weight`
+ tuple.
+
+ >>> distance("lewenstein", "levenshtein", weights=(1,1,2))
+ 3
+ """
+ return _Levenshtein.distance(
+ s1,
+ s2,
+ weights=weights,
+ processor=processor,
+ score_cutoff=score_cutoff,
+ score_hint=score_hint,
+ )
+
+
+def ratio(s1, s2, *, processor=None, score_cutoff=None):
+ """
+ Calculates a normalized indel similarity in the range [0, 1].
+ The indel distance calculates the minimum number of insertions and deletions
+ required to change one sequence into the other.
+
+ This is calculated as ``1 - (distance / (len1 + len2))``
+
+ Parameters
+ ----------
+ s1 : Sequence[Hashable]
+ First string to compare.
+ s2 : Sequence[Hashable]
+ Second string to compare.
+ processor: callable, optional
+ Optional callable that is used to preprocess the strings before
+ comparing them. Default is None, which deactivates this behaviour.
+ score_cutoff : float, optional
+ Optional argument for a score threshold as a float between 0 and 1.0.
+        For norm_sim < score_cutoff 0 is returned instead. Default is None,
+ which deactivates this behaviour.
+
+ Returns
+ -------
+ norm_sim : float
+ normalized similarity between s1 and s2 as a float between 0 and 1.0
+
+ Examples
+ --------
+ Find the normalized Indel similarity between two strings:
+
+ >>> from Levenshtein import ratio
+ >>> ratio("lewenstein", "levenshtein")
+    0.8571428571428572
+
+ Setting a score_cutoff allows the implementation to select
+ a more efficient implementation:
+
+ >>> ratio("lewenstein", "levenshtein", score_cutoff=0.9)
+ 0.0
+
+ When a different processor is used s1 and s2 do not have to be strings
+
+ >>> ratio(["lewenstein"], ["levenshtein"], processor=lambda s: s[0])
+ 0.8571428571428572
+ """
+ return _Indel.normalized_similarity(s1, s2, processor=processor, score_cutoff=score_cutoff)
+
+
+def hamming(s1, s2, *, pad=True, processor=None, score_cutoff=None):
+ """
+ Calculates the Hamming distance between two strings.
+ The hamming distance is defined as the number of positions
+ where the two strings differ. It describes the minimum
+ amount of substitutions required to transform s1 into s2.
+
+ Parameters
+ ----------
+ s1 : Sequence[Hashable]
+ First string to compare.
+ s2 : Sequence[Hashable]
+ Second string to compare.
+ pad : bool, optional
+ should strings be padded if there is a length difference.
+ If pad is False and strings have a different length
+ a ValueError is thrown instead. Default is True.
+ processor: callable, optional
+ Optional callable that is used to preprocess the strings before
+ comparing them. Default is None, which deactivates this behaviour.
+ score_cutoff : int or None, optional
+ Maximum distance between s1 and s2, that is
+ considered as a result. If the distance is bigger than score_cutoff,
+ score_cutoff + 1 is returned instead. Default is None, which deactivates
+ this behaviour.
+
+ Returns
+ -------
+ distance : int
+ distance between s1 and s2
+
+ Raises
+ ------
+ ValueError
+ If s1 and s2 have a different length
+ """
+ return _Hamming.distance(s1, s2, pad=pad, processor=processor, score_cutoff=score_cutoff)
+
+
+def jaro(s1, s2, *, processor=None, score_cutoff=None) -> float:
+ """
+ Calculates the jaro similarity
+
+ Parameters
+ ----------
+ s1 : Sequence[Hashable]
+ First string to compare.
+ s2 : Sequence[Hashable]
+ Second string to compare.
+ processor: callable, optional
+ Optional callable that is used to preprocess the strings before
+ comparing them. Default is None, which deactivates this behaviour.
+ score_cutoff : float, optional
+ Optional argument for a score threshold as a float between 0 and 1.0.
+ For ratio < score_cutoff 0 is returned instead. Default is None,
+ which deactivates this behaviour.
+
+ Returns
+ -------
+ similarity : float
+ similarity between s1 and s2 as a float between 0 and 1.0
+ """
+ return _Jaro.similarity(s1, s2, processor=processor, score_cutoff=score_cutoff)
+
+
+def jaro_winkler(s1, s2, *, prefix_weight=0.1, processor=None, score_cutoff=None) -> float:
+ """
+ Calculates the jaro winkler similarity
+
+ Parameters
+ ----------
+ s1 : Sequence[Hashable]
+ First string to compare.
+ s2 : Sequence[Hashable]
+ Second string to compare.
+ prefix_weight : float, optional
+ Weight used for the common prefix of the two strings.
+ Has to be between 0 and 0.25. Default is 0.1.
+ processor: callable, optional
+ Optional callable that is used to preprocess the strings before
+ comparing them. Default is None, which deactivates this behaviour.
+ score_cutoff : float, optional
+ Optional argument for a score threshold as a float between 0 and 1.0.
+ For ratio < score_cutoff 0 is returned instead. Default is None,
+ which deactivates this behaviour.
+
+ Returns
+ -------
+ similarity : float
+ similarity between s1 and s2 as a float between 0 and 1.0
+
+ Raises
+ ------
+ ValueError
+ If prefix_weight is invalid
+ """
+ return _JaroWinkler.similarity(
+ s1,
+ s2,
+ prefix_weight=prefix_weight,
+ processor=processor,
+ score_cutoff=score_cutoff,
+ )
+
+
+# assign attributes to function. This allows rapidfuzz to call them more efficiently
+# we can't directly copy the functions + replace the docstrings, since this leads to
+# crashes on PyPy
+distance._RF_OriginalScorer = distance
+ratio._RF_OriginalScorer = ratio
+hamming._RF_OriginalScorer = hamming
+jaro._RF_OriginalScorer = jaro
+jaro_winkler._RF_OriginalScorer = jaro_winkler
+
+distance._RF_ScorerPy = _Levenshtein.distance._RF_ScorerPy
+ratio._RF_ScorerPy = _Indel.normalized_similarity._RF_ScorerPy
+hamming._RF_ScorerPy = _Hamming.distance._RF_ScorerPy
+jaro._RF_ScorerPy = _Jaro.similarity._RF_ScorerPy
+jaro_winkler._RF_ScorerPy = _JaroWinkler.similarity._RF_ScorerPy
+
+if hasattr(_Levenshtein.distance, "_RF_Scorer"):
+ distance._RF_Scorer = _Levenshtein.distance._RF_Scorer
+if hasattr(_Indel.normalized_similarity, "_RF_Scorer"):
+ ratio._RF_Scorer = _Indel.normalized_similarity._RF_Scorer
+if hasattr(_Hamming.distance, "_RF_Scorer"):
+ hamming._RF_Scorer = _Hamming.distance._RF_Scorer
+if hasattr(_Jaro.similarity, "_RF_Scorer"):
+ jaro._RF_Scorer = _Jaro.similarity._RF_Scorer
+if hasattr(_JaroWinkler.similarity, "_RF_Scorer"):
+ jaro_winkler._RF_Scorer = _JaroWinkler.similarity._RF_Scorer
+
+
+def editops(*args):
+ """
+ Find sequence of edit operations transforming one string to another.
+
+ editops(source_string, destination_string)
+ editops(edit_operations, source_length, destination_length)
+
+ The result is a list of triples (operation, spos, dpos), where
+ operation is one of 'equal', 'replace', 'insert', or 'delete'; spos
+ and dpos are position of characters in the first (source) and the
+ second (destination) strings. These are operations on single
+ characters. In fact the returned list doesn't contain the 'equal',
+ but all the related functions accept both lists with and without
+ 'equal's.
+
+ Examples
+ --------
+ >>> editops('spam', 'park')
+ [('delete', 0, 0), ('insert', 3, 2), ('replace', 3, 3)]
+
+ The alternate form editops(opcodes, source_string, destination_string)
+ can be used for conversion from opcodes (5-tuples) to editops (you can
+ pass strings or their lengths, it doesn't matter).
+ """
+ # convert: we were called (bops, s1, s2)
+ if len(args) == 3:
+ arg1, arg2, arg3 = args
+ len1 = arg2 if isinstance(arg2, int) else len(arg2)
+ len2 = arg3 if isinstance(arg3, int) else len(arg3)
+ return _Editops(arg1, len1, len2).as_list()
+
+ # find editops: we were called (s1, s2)
+ arg1, arg2 = args
+ return _Levenshtein.editops(arg1, arg2).as_list()
+
+
+def opcodes(*args):
+ """
+ Find sequence of edit operations transforming one string to another.
+
+ opcodes(source_string, destination_string)
+ opcodes(edit_operations, source_length, destination_length)
+
+ The result is a list of 5-tuples with the same meaning as in
+ SequenceMatcher's get_opcodes() output. But since the algorithms
+ differ, the actual sequences from Levenshtein and SequenceMatcher
+ may differ too.
+
+ Examples
+ --------
+ >>> for x in opcodes('spam', 'park'):
+ ... print(x)
+ ...
+ ('delete', 0, 1, 0, 0)
+ ('equal', 1, 3, 0, 2)
+ ('insert', 3, 3, 2, 3)
+ ('replace', 3, 4, 3, 4)
+
+ The alternate form opcodes(editops, source_string, destination_string)
+ can be used for conversion from editops (triples) to opcodes (you can
+ pass strings or their lengths, it doesn't matter).
+ """
+ # convert: we were called (ops, s1, s2)
+ if len(args) == 3:
+ arg1, arg2, arg3 = args
+ len1 = arg2 if isinstance(arg2, int) else len(arg2)
+ len2 = arg3 if isinstance(arg3, int) else len(arg3)
+ return _Opcodes(arg1, len1, len2).as_list()
+
+ # find editops: we were called (s1, s2)
+ arg1, arg2 = args
+ return _Levenshtein.opcodes(arg1, arg2).as_list()
+
+
+def matching_blocks(edit_operations, source_string, destination_string):
+ """
+ Find identical blocks in two strings.
+
+ Parameters
+ ----------
+ edit_operations : list[]
+ editops or opcodes created for the source and destination string
+ source_string : str | int
+ source string or the length of the source string
+ destination_string : str | int
+ destination string or the length of the destination string
+
+ Returns
+ -------
+ matching_blocks : list[]
+ List of triples with the same meaning as in SequenceMatcher's
+ get_matching_blocks() output.
+
+ Examples
+ --------
+ >>> a, b = 'spam', 'park'
+ >>> matching_blocks(editops(a, b), a, b)
+ [(1, 0, 2), (4, 4, 0)]
+ >>> matching_blocks(editops(a, b), len(a), len(b))
+ [(1, 0, 2), (4, 4, 0)]
+
+ The last zero-length block is not an error, but it's there for
+ compatibility with difflib which always emits it.
+
+ One can join the matching blocks to get two identical strings:
+
+ >>> a, b = 'dog kennels', 'mattresses'
+ >>> mb = matching_blocks(editops(a,b), a, b)
+ >>> ''.join([a[x[0]:x[0]+x[2]] for x in mb])
+ 'ees'
+ >>> ''.join([b[x[1]:x[1]+x[2]] for x in mb])
+ 'ees'
+ """
+ len1 = source_string if isinstance(source_string, int) else len(source_string)
+ len2 = destination_string if isinstance(destination_string, int) else len(destination_string)
+
+ if not edit_operations or len(edit_operations[0]) == 3:
+ return _Editops(edit_operations, len1, len2).as_matching_blocks()
+
+ return _Opcodes(edit_operations, len1, len2).as_matching_blocks()
+
+
+def apply_edit(edit_operations, source_string, destination_string):
+ """
+ Apply a sequence of edit operations to a string.
+
+ apply_edit(edit_operations, source_string, destination_string)
+
+ In the case of editops, the sequence can be arbitrary ordered subset
+ of the edit sequence transforming source_string to destination_string.
+
+ Examples
+ --------
+ >>> e = editops('man', 'scotsman')
+ >>> apply_edit(e, 'man', 'scotsman')
+ 'scotsman'
+ >>> apply_edit(e[:3], 'man', 'scotsman')
+ 'scoman'
+
+ The other form of edit operations, opcodes, is not very suitable for
+    such tricks, because it has to always span over complete strings,
+ subsets can be created by carefully replacing blocks with 'equal'
+ blocks, or by enlarging 'equal' block at the expense of other blocks
+ and adjusting the other blocks accordingly.
+
+ >>> a, b = 'spam and eggs', 'foo and bar'
+ >>> e = opcodes(a, b)
+ >>> apply_edit(inverse(e), b, a)
+ 'spam and eggs'
+ """
+ if len(edit_operations) == 0:
+ return source_string
+
+ len1 = len(source_string)
+ len2 = len(destination_string)
+
+ if len(edit_operations[0]) == 3:
+ return _Editops(edit_operations, len1, len2).apply(source_string, destination_string)
+
+ return _Opcodes(edit_operations, len1, len2).apply(source_string, destination_string)
+
+
+def subtract_edit(edit_operations, subsequence):
+ """
+ Subtract an edit subsequence from a sequence.
+
+ subtract_edit(edit_operations, subsequence)
+
+ The result is equivalent to
+ editops(apply_edit(subsequence, s1, s2), s2), except that is
+ constructed directly from the edit operations. That is, if you apply
+ it to the result of subsequence application, you get the same final
+ string as from application complete edit_operations. It may be not
+    identical, though (in ambiguous cases, like insertion of a character
+ next to the same character).
+
+ The subtracted subsequence must be an ordered subset of
+ edit_operations.
+
+ Note this function does not accept difflib-style opcodes as no one in
+ his right mind wants to create subsequences from them.
+
+ Examples
+ --------
+ >>> e = editops('man', 'scotsman')
+ >>> e1 = e[:3]
+ >>> bastard = apply_edit(e1, 'man', 'scotsman')
+ >>> bastard
+ 'scoman'
+ >>> apply_edit(subtract_edit(e, e1), bastard, 'scotsman')
+ 'scotsman'
+ """
+ str_len = 2**32
+ return (
+ _Editops(edit_operations, str_len, str_len)
+ .remove_subsequence(_Editops(subsequence, str_len, str_len))
+ .as_list()
+ )
+
+
+def inverse(edit_operations):
+ """
+ Invert the sense of an edit operation sequence.
+
+ In other words, it returns a list of edit operations transforming the
+ second (destination) string to the first (source). It can be used
+ with both editops and opcodes.
+
+ Parameters
+ ----------
+ edit_operations : list[]
+ edit operations to invert
+
+ Returns
+ -------
+ edit_operations : list[]
+ inverted edit operations
+
+ Examples
+ --------
+ >>> editops('spam', 'park')
+ [('delete', 0, 0), ('insert', 3, 2), ('replace', 3, 3)]
+ >>> inverse(editops('spam', 'park'))
+ [('insert', 0, 0), ('delete', 2, 3), ('replace', 3, 3)]
+ """
+ if len(edit_operations) == 0:
+ return []
+
+ if len(edit_operations[0]) == 3:
+ len1 = edit_operations[-1][1] + 1
+ len2 = edit_operations[-1][2] + 1
+ return _Editops(edit_operations, len1, len2).inverse().as_list()
+
+ len1 = edit_operations[-1][2]
+ len2 = edit_operations[-1][4]
+
+ return _Opcodes(edit_operations, len1, len2).inverse().as_list()
diff --git a/py311/lib/python3.11/site-packages/Levenshtein/__init__.pyi b/py311/lib/python3.11/site-packages/Levenshtein/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..8b51e2709c32bbc7867a111b0fe3b495094f3781
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/Levenshtein/__init__.pyi
@@ -0,0 +1,87 @@
+from __future__ import annotations
+
+from collections.abc import Callable, Hashable, Sequence
+from typing import overload
+
+__author__: str
+__license__: str
+__version__: str
+
+_EditopsList = list[tuple[str, int, int]]
+_OpcodesList = list[tuple[str, int, int, int, int]]
+_MatchingBlocks = list[tuple[int, int, int]]
+_AnyEditops = _EditopsList | _OpcodesList
+
+def inverse(edit_operations: list) -> list: ...
+@overload
+def editops(s1: Sequence[Hashable], s2: Sequence[Hashable]) -> _EditopsList: ...
+@overload
+def editops(
+ ops: _AnyEditops,
+ s1: Sequence[Hashable] | int,
+ s2: Sequence[Hashable] | int,
+) -> _EditopsList: ...
+@overload
+def opcodes(s1: Sequence[Hashable], s2: Sequence[Hashable]) -> _OpcodesList: ...
+@overload
+def opcodes(
+ ops: _AnyEditops,
+ s1: Sequence[Hashable] | int,
+ s2: Sequence[Hashable] | int,
+) -> _OpcodesList: ...
+def matching_blocks(
+ edit_operations: _AnyEditops,
+ source_string: Sequence[Hashable] | int,
+ destination_string: Sequence[Hashable] | int,
+) -> _MatchingBlocks: ...
+def subtract_edit(edit_operations: _EditopsList, subsequence: _EditopsList) -> _EditopsList: ...
+def apply_edit(edit_operations: _AnyEditops, source_string: str, destination_string: str) -> str: ...
+def median(strlist: list[str | bytes], wlist: list[float] | None = None) -> str: ...
+def quickmedian(strlist: list[str | bytes], wlist: list[float] | None = None) -> str: ...
+def median_improve(
+ string: str | bytes,
+ strlist: list[str | bytes],
+ wlist: list[float] | None = None,
+) -> str: ...
+def setmedian(strlist: list[str | bytes], wlist: list[float] | None = None) -> str: ...
+def setratio(strlist1: list[str | bytes], strlist2: list[str | bytes]) -> float: ...
+def seqratio(strlist1: list[str | bytes], strlist2: list[str | bytes]) -> float: ...
+def distance(
+ s1: Sequence[Hashable],
+ s2: Sequence[Hashable],
+ *,
+ weights: tuple[int, int, int] | None = (1, 1, 1),
+ processor: Callable[..., Sequence[Hashable]] | None = None,
+ score_cutoff: float | None = None,
+ score_hint: float | None = None,
+) -> int: ...
+def ratio(
+ s1: Sequence[Hashable],
+ s2: Sequence[Hashable],
+ *,
+ processor: Callable[..., Sequence[Hashable]] | None = None,
+ score_cutoff: float | None = None,
+) -> float: ...
+def hamming(
+ s1: Sequence[Hashable],
+ s2: Sequence[Hashable],
+ *,
+ pad: bool = True,
+ processor: Callable[..., Sequence[Hashable]] | None = None,
+ score_cutoff: float | None = None,
+) -> int: ...
+def jaro(
+ s1: Sequence[Hashable],
+ s2: Sequence[Hashable],
+ *,
+ processor: Callable[..., Sequence[Hashable]] | None = None,
+ score_cutoff: float | None = None,
+) -> float: ...
+def jaro_winkler(
+ s1: Sequence[Hashable],
+ s2: Sequence[Hashable],
+ *,
+ prefix_weight: float | None = 0.1,
+ processor: Callable[..., Sequence[Hashable]] | None = None,
+ score_cutoff: float | None = None,
+) -> float: ...
diff --git a/py311/lib/python3.11/site-packages/Levenshtein/py.typed b/py311/lib/python3.11/site-packages/Levenshtein/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+uv
\ No newline at end of file
diff --git a/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..f26bcf4d2de6eb136e31006ca3ab447d5e488adf
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/LICENSE
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation. In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..c632040d66bf120a377fc3785940934361273a66
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/METADATA
@@ -0,0 +1,123 @@
+Metadata-Version: 2.3
+Name: aiohappyeyeballs
+Version: 2.6.1
+Summary: Happy Eyeballs for asyncio
+License: PSF-2.0
+Author: J. Nick Koston
+Author-email: nick@koston.org
+Requires-Python: >=3.9
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Project-URL: Bug Tracker, https://github.com/aio-libs/aiohappyeyeballs/issues
+Project-URL: Changelog, https://github.com/aio-libs/aiohappyeyeballs/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://aiohappyeyeballs.readthedocs.io
+Project-URL: Repository, https://github.com/aio-libs/aiohappyeyeballs
+Description-Content-Type: text/markdown
+
+# aiohappyeyeballs
+
+
+
+---
+
+**Documentation**: https://aiohappyeyeballs.readthedocs.io
+
+**Source Code**: https://github.com/aio-libs/aiohappyeyeballs
+
+---
+
+[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
+([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))
+
+## Use case
+
+This library exists to allow connecting with
+[Happy Eyeballs](https://en.wikipedia.org/wiki/Happy_Eyeballs)
+([RFC 8305](https://www.rfc-editor.org/rfc/rfc8305.html))
+when you
+already have a list of addrinfo and not a DNS name.
+
+The stdlib version of `loop.create_connection()`
+will only work when you pass in an unresolved name which
+is not a good fit when using DNS caching or resolving
+names via another method such as `zeroconf`.
+
+## Installation
+
+Install this via pip (or your favourite package manager):
+
+`pip install aiohappyeyeballs`
+
+## License
+
+[aiohappyeyeballs is licensed under the same terms as cpython itself.](https://github.com/python/cpython/blob/main/LICENSE)
+
+## Example usage
+
+```python
+
+addr_infos = await loop.getaddrinfo("example.org", 80)
+
+socket = await start_connection(addr_infos)
+socket = await start_connection(addr_infos, local_addr_infos=local_addr_infos, happy_eyeballs_delay=0.2)
+
+transport, protocol = await loop.create_connection(
+ MyProtocol, sock=socket, ...)
+
+# Remove the first address for each family from addr_info
+pop_addr_infos_interleave(addr_info, 1)
+
+# Remove all matching address from addr_info
+remove_addr_infos(addr_info, "dead::beef::")
+
+# Convert a local_addr to local_addr_infos
+local_addr_infos = addr_to_addr_infos(("127.0.0.1",0))
+```
+
+## Credits
+
+This package contains code from cpython and is licensed under the same terms as cpython itself.
+
+This package was created with
+[Copier](https://copier.readthedocs.io/) and the
+[browniebroke/pypackage-template](https://github.com/browniebroke/pypackage-template)
+project template.
+
diff --git a/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..f98b55aafa7c029ddda6f3fea78ced5742c20e51
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/RECORD
@@ -0,0 +1,12 @@
+aiohappyeyeballs-2.6.1.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
+aiohappyeyeballs-2.6.1.dist-info/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
+aiohappyeyeballs-2.6.1.dist-info/METADATA,sha256=NSXlhJwAfi380eEjAo7BQ4P_TVal9xi0qkyZWibMsVM,5915
+aiohappyeyeballs-2.6.1.dist-info/RECORD,,
+aiohappyeyeballs-2.6.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiohappyeyeballs-2.6.1.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+aiohappyeyeballs/__init__.py,sha256=x7kktHEtaD9quBcWDJPuLeKyjuVAI-Jj14S9B_5hcTs,361
+aiohappyeyeballs/_staggered.py,sha256=edfVowFx-P-ywJjIEF3MdPtEMVODujV6CeMYr65otac,6900
+aiohappyeyeballs/impl.py,sha256=Dlcm2mTJ28ucrGnxkb_fo9CZzLAkOOBizOt7dreBbXE,9681
+aiohappyeyeballs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+aiohappyeyeballs/types.py,sha256=YZJIAnyoV4Dz0WFtlaf_OyE4EW7Xus1z7aIfNI6tDDQ,425
+aiohappyeyeballs/utils.py,sha256=on9GxIR0LhEfZu8P6Twi9hepX9zDanuZM20MWsb3xlQ,3028
diff --git a/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/REQUESTED b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..0582547b15f02d3a51659106262832565d5dc5ea
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/aiohappyeyeballs-2.6.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: poetry-core 2.1.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/py311/lib/python3.11/site-packages/attr/__init__.py b/py311/lib/python3.11/site-packages/attr/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c6e0650bc4bf53806420d7ef5f881ecd2bd77ea
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/__init__.py
@@ -0,0 +1,104 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Classes Without Boilerplate
+"""
+
+from functools import partial
+from typing import Callable, Literal, Protocol
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Converter,
+ Factory,
+ _Nothing,
+ attrib,
+ attrs,
+ evolve,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._next_gen import define, field, frozen, mutable
+from ._version_info import VersionInfo
+
+
# Historic short aliases: ``attr.s``/``attr.attributes`` for the class
# decorator and ``attr.ib``/``attr.attr`` for field definitions.
s = attributes = attrs
ib = attr = attrib
# ``attr.dataclass`` is ``attrs`` with ``auto_attribs`` pre-enabled.
dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)


class AttrsInstance(Protocol):
    # Runtime marker protocol for "an attrs-decorated instance"; the
    # structural details live in the accompanying type stubs.
    pass


# Annotation-friendly alias for the type of the ``NOTHING`` sentinel.
NothingType = Literal[_Nothing.NOTHING]

# Public API of the ``attr`` package.
__all__ = [
    "NOTHING",
    "Attribute",
    "AttrsInstance",
    "Converter",
    "Factory",
    "NothingType",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "cmp_using",
    "converters",
    "define",
    "evolve",
    "exceptions",
    "field",
    "fields",
    "fields_dict",
    "filters",
    "frozen",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "mutable",
    "resolve_types",
    "s",
    "set_run_validators",
    "setters",
    "validate",
    "validators",
]
+
+
def _make_getattr(mod_name: str) -> Callable:
    """
    Build a module-level ``__getattr__`` that serves packaging metadata
    lazily, attributing errors to *mod_name*.
    """

    def __getattr__(name: str) -> str:
        # Only the two version attributes are resolved lazily from the
        # installed distribution; anything else is genuinely missing.
        if name in ("__version__", "__version_info__"):
            from importlib.metadata import metadata

            meta = metadata("attrs")

            if name == "__version__":
                return meta["version"]

            return VersionInfo._from_version_string(meta["version"])

        raise AttributeError(f"module {mod_name} has no attribute {name}")

    return __getattr__


__getattr__ = _make_getattr(__name__)
diff --git a/py311/lib/python3.11/site-packages/attr/__init__.pyi b/py311/lib/python3.11/site-packages/attr/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..8d78fa19abe1a529af98889eb40017c32d5b62a2
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/__init__.pyi
@@ -0,0 +1,389 @@
+import enum
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Generic,
+ Literal,
+ Mapping,
+ Protocol,
+ Sequence,
+ TypeVar,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._cmp import cmp_using as cmp_using
+from ._typing_compat import AttrsInstance_
+from ._version_info import VersionInfo
+from attrs import (
+ define as define,
+ field as field,
+ mutable as mutable,
+ frozen as frozen,
+ _EqOrderType,
+ _ValidatorType,
+ _ConverterType,
+ _ReprArgType,
+ _OnSetAttrType,
+ _OnSetAttrArgType,
+ _FieldTransformer,
+ _ValidatorArgType,
+)
+
+if sys.version_info >= (3, 10):
+ from typing import TypeGuard, TypeAlias
+else:
+ from typing_extensions import TypeGuard, TypeAlias
+
+if sys.version_info >= (3, 11):
+ from typing import dataclass_transform
+else:
+ from typing_extensions import dataclass_transform
+
# Package metadata attributes as seen by type checkers; at runtime the
# version fields are served lazily by the module-level ``__getattr__``.
__version__: str
__version_info__: VersionInfo
__title__: str
__description__: str
__url__: str
__uri__: str
__author__: str
__email__: str
__license__: str
__copyright__: str

# Private type variables used throughout this stub.
_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)

_FilterType = Callable[["Attribute[_T]", _T], bool]

# We subclass this here to keep the protocol's qualified name clean.
class AttrsInstance(AttrsInstance_, Protocol):
    pass

_A = TypeVar("_A", bound=type[AttrsInstance])

# Enum backing the ``NOTHING`` singleton sentinel.
class _Nothing(enum.Enum):
    NOTHING = enum.auto()

NOTHING = _Nothing.NOTHING
NothingType: TypeAlias = Literal[_Nothing.NOTHING]

# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.

@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
    factory: Callable[[Any], _T],
    takes_self: Literal[True],
) -> _T: ...
@overload
def Factory(
    factory: Callable[[], _T],
    takes_self: Literal[False],
) -> _T: ...

In = TypeVar("In")
Out = TypeVar("Out")

# Overloads mirror the runtime Converter: the wrapped callable may also
# receive the instance and/or the field, selected by keyword-only flags.
class Converter(Generic[In, Out]):
    @overload
    def __init__(self, converter: Callable[[In], Out]) -> None: ...
    @overload
    def __init__(
        self,
        converter: Callable[[In, AttrsInstance, Attribute], Out],
        *,
        takes_self: Literal[True],
        takes_field: Literal[True],
    ) -> None: ...
    @overload
    def __init__(
        self,
        converter: Callable[[In, Attribute], Out],
        *,
        takes_field: Literal[True],
    ) -> None: ...
    @overload
    def __init__(
        self,
        converter: Callable[[In, AttrsInstance], Out],
        *,
        takes_self: Literal[True],
    ) -> None: ...

class Attribute(Generic[_T]):
    name: str
    default: _T | None
    validator: _ValidatorType[_T] | None
    repr: _ReprArgType
    cmp: _EqOrderType
    eq: _EqOrderType
    order: _EqOrderType
    hash: bool | None
    init: bool
    converter: Converter | None
    metadata: dict[Any, Any]
    type: type[_T] | None
    kw_only: bool
    on_setattr: _OnSetAttrType
    alias: str | None

    def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=) -> Whatever the callable expects.
+# This makes this type of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
# Overload 1/4: explicit None (or omitted) default and no other typed
# arguments -> Any.
@overload
def attrib(
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: None = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool | None = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def attrib(
    default: None = ...,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: type[_T] | None = ...,
    converter: _ConverterType
    | list[_ConverterType]
    | tuple[_ConverterType]
    | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool | None = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def attrib(
    default: _T,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: type[_T] | None = ...,
    converter: _ConverterType
    | list[_ConverterType]
    | tuple[_ConverterType]
    | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool | None = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> _T: ...

# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def attrib(
    default: _T | None = ...,
    validator: _ValidatorArgType[_T] | None = ...,
    repr: _ReprArgType = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    metadata: Mapping[Any, Any] | None = ...,
    type: object = ...,
    converter: _ConverterType
    | list[_ConverterType]
    | tuple[_ConverterType]
    | None = ...,
    factory: Callable[[], _T] | None = ...,
    kw_only: bool | None = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    alias: str | None = ...,
) -> Any: ...
# attrs() decorator overloads: bare use (``@attrs`` directly on a class)
# vs. called-with-arguments use (``@attrs(...)`` returning a decorator).
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
    maybe_cls: _C,
    these: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
    unsafe_hash: bool | None = ...,
) -> _C: ...
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
    maybe_cls: None = ...,
    these: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: bool | None = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
    match_args: bool = ...,
    unsafe_hash: bool | None = ...,
) -> Callable[[_C], _C]: ...
def fields(cls: type[AttrsInstance]) -> Any: ...
def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
    cls: _A,
    globalns: dict[str, Any] | None = ...,
    localns: dict[str, Any] | None = ...,
    attribs: list[Attribute[Any]] | None = ...,
    include_extras: bool = ...,
) -> _A: ...

# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
    name: str,
    attrs: list[str] | tuple[str, ...] | dict[str, Any],
    bases: tuple[type, ...] = ...,
    class_body: dict[str, Any] | None = ...,
    repr_ns: str | None = ...,
    repr: bool = ...,
    cmp: _EqOrderType | None = ...,
    hash: bool | None = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: _EqOrderType | None = ...,
    order: _EqOrderType | None = ...,
    collect_by_mro: bool = ...,
    on_setattr: _OnSetAttrArgType | None = ...,
    field_transformer: _FieldTransformer | None = ...,
) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: _FilterType[Any] | None = ...,
    dict_factory: type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
    tuple_keys: bool | None = ...,
) -> dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: _FilterType[Any] | None = ...,
    tuple_factory: type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> tuple[Any, ...]: ...
def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

# _config --

def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...

# aliases --

# Short aliases mirroring the runtime module's backwards-compatible names.
s = attributes = attrs
ib = attr = attrib
dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/__init__.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..57630b80a5266640a7741a0d708300cd0c7fb528
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/__init__.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/_cmp.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/_cmp.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..af74683ca3e688fe17e0b556bdd5a0368809a5da
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/_cmp.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/_compat.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/_compat.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..64e72b37435eeaaa12aa53c8d531ebac528de4d9
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/_compat.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/_config.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/_config.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4286cbe54864b616f1c96e7361e49b16bf905d72
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/_config.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/_funcs.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/_funcs.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..558abef9004625e971222018b033fbd5ec43a285
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/_funcs.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f6268119d3ee832e953e8e53c43a790320f24bc0
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/_next_gen.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/_version_info.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/_version_info.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7837624c8755cd27857ed47786e7715c12a4c107
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/_version_info.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/converters.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/converters.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eccd88d1a3a0f1de2ba09515fe96ad494b6e0420
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/converters.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/exceptions.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/exceptions.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7d8eb7e4359bdd3df0ee90ca329251009be13e0d
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/exceptions.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/filters.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/filters.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..774d47777d53ce5823fe21802e37ae2caad5b860
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/filters.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/setters.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/setters.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0e59cf67598bd57cfd8f3cb5320272c81d467cbd
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/setters.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/__pycache__/validators.cpython-311.pyc b/py311/lib/python3.11/site-packages/attr/__pycache__/validators.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..98270e24ce65dd6801487457d5e25e3adb4d2e5d
Binary files /dev/null and b/py311/lib/python3.11/site-packages/attr/__pycache__/validators.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/attr/_cmp.py b/py311/lib/python3.11/site-packages/attr/_cmp.py
new file mode 100644
index 0000000000000000000000000000000000000000..09bab491f83ef4d15129f34b5f5a9e69bb34d63c
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_cmp.py
@@ -0,0 +1,160 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import __ne__
+
+
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}


def cmp_using(
    eq=None,
    lt=None,
    le=None,
    gt=None,
    ge=None,
    require_same_type=True,
    class_name="Comparable",
):
    """
    Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
    and ``cmp`` arguments to customize field comparison.

    The resulting class will have a full set of ordering methods if at least
    one of ``{lt, le, gt, ge}`` and ``eq`` are provided.

    Args:
        eq (typing.Callable | None):
            Callable used to evaluate equality of two objects.

        lt (typing.Callable | None):
            Callable used to evaluate whether one object is less than another
            object.

        le (typing.Callable | None):
            Callable used to evaluate whether one object is less than or equal
            to another object.

        gt (typing.Callable | None):
            Callable used to evaluate whether one object is greater than
            another object.

        ge (typing.Callable | None):
            Callable used to evaluate whether one object is greater than or
            equal to another object.

        require_same_type (bool):
            When `True`, equality and ordering methods will return
            `NotImplemented` if objects are not of the same type.

        class_name (str | None): Name of class. Defaults to "Comparable".

    Raises:
        ValueError: If any of *lt*, *le*, *gt*, *ge* is given without *eq*
            (``functools.total_ordering`` needs ``__eq__``).

    See `comparison` for more details.

    .. versionadded:: 21.1.0
    """

    body = {
        "__slots__": ["value"],
        "__init__": _make_init(),
        "_requirements": [],
        "_is_comparable_to": _is_comparable_to,
    }

    # Add operations.
    num_order_functions = 0
    has_eq_function = False

    if eq is not None:
        has_eq_function = True
        body["__eq__"] = _make_operator("eq", eq)
        body["__ne__"] = __ne__

    if lt is not None:
        num_order_functions += 1
        body["__lt__"] = _make_operator("lt", lt)

    if le is not None:
        num_order_functions += 1
        body["__le__"] = _make_operator("le", le)

    if gt is not None:
        num_order_functions += 1
        body["__gt__"] = _make_operator("gt", gt)

    if ge is not None:
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = types.new_class(
        class_name, (object,), {}, lambda ns: ns.update(body)
    )

    # Add same type requirement.
    if require_same_type:
        type_._requirements.append(_check_same_type)

    # Add total ordering if at least one operation was defined, but not all
    # four (in which case nothing is missing).
    if 0 < num_order_functions < 4:
        if not has_eq_function:
            # functools.total_ordering requires __eq__ to be defined,
            # so raise early error here to keep a nice stack.
            # (Message fixed: was "eq must be define is order to ...".)
            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
            raise ValueError(msg)
        type_ = functools.total_ordering(type_)

    return type_


def _make_init():
    """
    Create the ``__init__`` method for the generated comparable class.
    """

    def __init__(self, value):
        """
        Initialize object with *value*.
        """
        self.value = value

    return __init__


def _make_operator(name, func):
    """
    Create the dunder operator method ``__{name}__`` delegating to *func*.
    """

    def method(self, other):
        if not self._is_comparable_to(other):
            return NotImplemented

        # Propagate NotImplemented so Python can try the reflected op.
        result = func(self.value, other.value)
        if result is NotImplemented:
            return NotImplemented

        return result

    method.__name__ = f"__{name}__"
    method.__doc__ = (
        f"Return a {_operation_names[name]} b. Computed by attrs."
    )

    return method


def _is_comparable_to(self, other):
    """
    Check whether *other* is comparable to *self* by evaluating every
    registered requirement (e.g. the same-type check).
    """
    return all(func(self, other) for func in self._requirements)


def _check_same_type(self, other):
    """
    Return True if *self* and *other* wrap values of the same type,
    False otherwise.
    """
    return other.value.__class__ is self.value.__class__
diff --git a/py311/lib/python3.11/site-packages/attr/_cmp.pyi b/py311/lib/python3.11/site-packages/attr/_cmp.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..cc7893b04520afa719b1412c7646c3c1b39bf94b
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_cmp.pyi
@@ -0,0 +1,13 @@
# Type stub for ``attr._cmp.cmp_using``.
from typing import Any, Callable

# A binary comparison callable: receives the two wrapped values, returns bool.
_CompareWithType = Callable[[Any, Any], bool]

def cmp_using(
    eq: _CompareWithType | None = ...,
    lt: _CompareWithType | None = ...,
    le: _CompareWithType | None = ...,
    gt: _CompareWithType | None = ...,
    ge: _CompareWithType | None = ...,
    require_same_type: bool = ...,
    class_name: str = ...,
) -> type: ...
diff --git a/py311/lib/python3.11/site-packages/attr/_compat.py b/py311/lib/python3.11/site-packages/attr/_compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc68ed9eaf9853bcc6065e7691209a787bf2eccc
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_compat.py
@@ -0,0 +1,99 @@
+# SPDX-License-Identifier: MIT
+
+import inspect
+import platform
+import sys
+import threading
+
+from collections.abc import Mapping, Sequence # noqa: F401
+from typing import _GenericAlias
+
+
# True when running on the PyPy interpreter (as opposed to CPython).
PYPY = platform.python_implementation() == "PyPy"
# Version feature flags used to pick version-dependent code paths.
PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
+
+
if PY_3_14_PLUS:
    import annotationlib

    # We request forward-ref annotations to not break in the presence of
    # forward references.

    def _get_annotations(cls):
        """
        Get annotations for *cls*, keeping unresolved forward references
        as ForwardRef objects (Format.FORWARDREF) instead of raising.
        """
        return annotationlib.get_annotations(
            cls, format=annotationlib.Format.FORWARDREF
        )

else:

    def _get_annotations(cls):
        """
        Get annotations for *cls*.

        Only the class's own ``__dict__`` is consulted, so inherited
        annotations are deliberately excluded.
        """
        return cls.__dict__.get("__annotations__", {})
+
+
+class _AnnotationExtractor:
+ """
+ Extract type annotations from a callable, returning None whenever there
+ is none.
+ """
+
+ __slots__ = ["sig"]
+
+ def __init__(self, callable):
+ try:
+ self.sig = inspect.signature(callable)
+ except (ValueError, TypeError): # inspect failed
+ self.sig = None
+
+ def get_first_param_type(self):
+ """
+ Return the type annotation of the first argument if it's not empty.
+ """
+ if not self.sig:
+ return None
+
+ params = list(self.sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ return params[0].annotation
+
+ return None
+
+ def get_return_type(self):
+ """
+ Return the return type if it's not empty.
+ """
+ if (
+ self.sig
+ and self.sig.return_annotation is not inspect.Signature.empty
+ ):
+ return self.sig.return_annotation
+
+ return None
+
+
# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
#
# Each thread sees its own independent attribute namespace on this object.
repr_context = threading.local()
+
+
def get_generic_base(cl):
    """If this is a generic class (A[str]), return the generic base for it."""
    # Parametrized generics are instances of typing._GenericAlias; anything
    # else (including plain classes) has no generic base.
    if cl.__class__ is not _GenericAlias:
        return None
    return cl.__origin__
diff --git a/py311/lib/python3.11/site-packages/attr/_config.py b/py311/lib/python3.11/site-packages/attr/_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b257726fb1e8b95583ecc3eee8d153336dc4089
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_config.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: MIT
+
__all__ = ["get_run_validators", "set_run_validators"]

# Module-wide switch: validators run unless explicitly disabled.
_run_validators = True


def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
       moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
       instead.
    """
    global _run_validators

    if not isinstance(run, bool):
        msg = "'run' must be bool."
        raise TypeError(msg)

    _run_validators = run


def get_run_validators():
    """
    Return whether or not validators are run.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
       moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
       instead.
    """
    return _run_validators
diff --git a/py311/lib/python3.11/site-packages/attr/_funcs.py b/py311/lib/python3.11/site-packages/attr/_funcs.py
new file mode 100644
index 0000000000000000000000000000000000000000..1adb50021373d9c09fcb9db0641bbc03248d54a3
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_funcs.py
@@ -0,0 +1,497 @@
+# SPDX-License-Identifier: MIT
+
+
+import copy
+
+from ._compat import get_generic_base
+from ._make import _OBJ_SETATTR, NOTHING, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
# Types that asdict/astuple treat as opaque leaves: values of these types are
# returned as-is and never recursed into.
_ATOMIC_TYPES = frozenset(
    {
        type(None),
        bool,
        int,
        float,
        str,
        complex,
        bytes,
        type(...),
        type,
        range,
        property,
    }
)
+
+
def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
    value_serializer=None,
):
    """
    Return the *attrs* attribute values of *inst* as a dict.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool): Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        dict_factory (~typing.Callable):
            A callable to produce dictionaries from. For example, to produce
            ordered dictionaries instead of normal Python dictionaries, pass in
            ``collections.OrderedDict``.

        retain_collection_types (bool):
            Do not convert to `list` when encountering an attribute whose type
            is `tuple` or `set`. Only meaningful if *recurse* is `True`.

        value_serializer (typing.Callable | None):
            A hook that is called for every attribute or dict key/value. It
            receives the current instance, field and value and must return the
            (updated) value. The hook is run *after* the optional *filter* has
            been applied.

    Returns:
        Return type of *dict_factory*.

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    .. versionadded:: 21.3.0
       If a dict has a collection for a key, it is serialized as a tuple.
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        # Dropped attributes are simply absent from the result.
        if filter is not None and not filter(a, v):
            continue

        if value_serializer is not None:
            v = value_serializer(inst, a, v)

        if recurse is True:
            value_type = type(v)
            if value_type in _ATOMIC_TYPES:
                rv[a.name] = v
            elif has(value_type):
                # Nested attrs class: recurse with the same settings.
                rv[a.name] = asdict(
                    v,
                    recurse=True,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
            elif issubclass(value_type, (tuple, list, set, frozenset)):
                cf = value_type if retain_collection_types is True else list
                items = [
                    _asdict_anything(
                        i,
                        is_key=False,
                        filter=filter,
                        dict_factory=dict_factory,
                        retain_collection_types=retain_collection_types,
                        value_serializer=value_serializer,
                    )
                    for i in v
                ]
                try:
                    rv[a.name] = cf(items)
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (which appears, for a namedtuple)
                    rv[a.name] = cf(*items)
            elif issubclass(value_type, dict):
                df = dict_factory
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk,
                            is_key=True,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            is_key=False,
                            filter=filter,
                            dict_factory=df,
                            retain_collection_types=retain_collection_types,
                            value_serializer=value_serializer,
                        ),
                    )
                    for kk, vv in v.items()
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv
+
+
def _asdict_anything(
    val,
    is_key,
    filter,
    dict_factory,
    retain_collection_types,
    value_serializer,
):
    """
    ``asdict`` only works on attrs instances, this works on anything.

    *is_key* is True when *val* came from a dict key; in that case
    collections are converted to (hashable) tuples instead of lists.
    """
    val_type = type(val)
    if val_type in _ATOMIC_TYPES:
        rv = val
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)
    elif getattr(val_type, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(
            val,
            recurse=True,
            filter=filter,
            dict_factory=dict_factory,
            retain_collection_types=retain_collection_types,
            value_serializer=value_serializer,
        )
    elif issubclass(val_type, (tuple, list, set, frozenset)):
        if retain_collection_types is True:
            cf = val.__class__
        elif is_key:
            # Dict keys must stay hashable, so collections become tuples.
            cf = tuple
        else:
            cf = list

        rv = cf(
            [
                _asdict_anything(
                    i,
                    is_key=False,
                    filter=filter,
                    dict_factory=dict_factory,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                )
                for i in val
            ]
        )
    elif issubclass(val_type, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(
                    kk,
                    is_key=True,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
                _asdict_anything(
                    vv,
                    is_key=False,
                    filter=filter,
                    dict_factory=df,
                    retain_collection_types=retain_collection_types,
                    value_serializer=value_serializer,
                ),
            )
            for kk, vv in val.items()
        )
    else:
        rv = val
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)

    return rv
+
+
def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the *attrs* attribute values of *inst* as a tuple.

    Optionally recurse into other *attrs*-decorated classes.

    Args:
        inst: Instance of an *attrs*-decorated class.

        recurse (bool):
            Recurse into classes that are also *attrs*-decorated.

        filter (~typing.Callable):
            A callable whose return code determines whether an attribute or
            element is included (`True`) or dropped (`False`). Is called with
            the `attrs.Attribute` as the first argument and the value as the
            second argument.

        tuple_factory (~typing.Callable):
            A callable to produce tuples from. For example, to produce lists
            instead of tuples.

        retain_collection_types (bool):
            Do not convert to `list` or `dict` when encountering an attribute
            which type is `tuple`, `dict` or `set`. Only meaningful if
            *recurse* is `True`.

    Returns:
        Return type of *tuple_factory*

    Raises:
        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        value_type = type(v)
        if recurse is True:
            if value_type in _ATOMIC_TYPES:
                rv.append(v)
            elif has(value_type):
                # Nested attrs class: recurse with the same settings.
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif issubclass(value_type, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain is True else list
                items = [
                    (
                        astuple(
                            j,
                            recurse=True,
                            filter=filter,
                            tuple_factory=tuple_factory,
                            retain_collection_types=retain,
                        )
                        if has(j.__class__)
                        else j
                    )
                    for j in v
                ]
                try:
                    rv.append(cf(items))
                except TypeError:
                    if not issubclass(cf, tuple):
                        raise
                    # Workaround for TypeError: cf.__new__() missing 1 required
                    # positional argument (which appears, for a namedtuple)
                    rv.append(cf(*items))
            elif issubclass(value_type, dict):
                df = value_type if retain is True else dict
                rv.append(
                    df(
                        (
                            (
                                astuple(
                                    kk,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(kk.__class__)
                                else kk
                            ),
                            (
                                astuple(
                                    vv,
                                    tuple_factory=tuple_factory,
                                    retain_collection_types=retain,
                                )
                                if has(vv.__class__)
                                else vv
                            ),
                        )
                        for kk, vv in v.items()
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)

    # A plain list factory can take the accumulator as-is; any other factory
    # gets the collected values passed through it.
    return rv if tuple_factory is list else tuple_factory(rv)
+
+
def has(cls):
    """
    Check whether *cls* is a class with *attrs* attributes.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

    Returns:
        bool:
    """
    if getattr(cls, "__attrs_attrs__", None) is not None:
        return True

    # Not directly an attrs class -- maybe a parametrized generic (A[str])?
    generic_base = get_generic_base(cls)
    if generic_base is None:
        return False

    generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
    if generic_attrs is not None:
        # Stick it on here for speed next time.
        cls.__attrs_attrs__ = generic_attrs
    return generic_attrs is not None
+
+
def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    This is different from `evolve` that applies the changes to the arguments
    that create the new instance.

    `evolve`'s behavior is preferable, but there are `edge cases`_ where it
    doesn't work. Therefore `assoc` is deprecated, but will not be removed.

    .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251

    Args:
        inst: Instance of a class with *attrs* attributes.

        changes: Keyword changes in the new copy.

    Returns:
        A copy of inst with *changes* incorporated.

    Raises:
        attrs.exceptions.AttrsAttributeNotFoundError:
            If *attr_name* couldn't be found on *cls*.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. deprecated:: 17.1.0
        Use `attrs.evolve` instead if you can. This function will not be
        removed due to the slightly different approach compared to
        `attrs.evolve`, though.
    """
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in changes.items():
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            msg = f"{k} is not an attrs attribute on {new.__class__}."
            raise AttrsAttributeNotFoundError(msg)
        # object.__setattr__ so this also works on frozen classes, whose own
        # __setattr__ raises.
        _OBJ_SETATTR(new, k, v)
    return new
+
+
def resolve_types(
    cls, globalns=None, localns=None, attribs=None, include_extras=True
):
    """
    Resolve any strings and forward annotations in type annotations.

    This is only required if you need concrete types in :class:`Attribute`'s
    *type* field. In other words, you don't need to resolve your types if you
    only use them for static type checking.

    With no arguments, names will be looked up in the module in which the class
    was created. If this is not what you want, for example, if the name only
    exists inside a method, you may pass *globalns* or *localns* to specify
    other dictionaries in which to look up these names. See the docs of
    `typing.get_type_hints` for more details.

    Args:
        cls (type): Class to resolve.

        globalns (dict | None): Dictionary containing global variables.

        localns (dict | None): Dictionary containing local variables.

        attribs (list | None):
            List of attribs for the given class. This is necessary when calling
            from inside a ``field_transformer`` since *cls* is not an *attrs*
            class yet.

        include_extras (bool):
            Resolve more accurately, if possible. Pass ``include_extras`` to
            ``typing.get_type_hints``, if supported by the typing module. On
            supported Python versions (3.9+), this resolves the types more
            accurately.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class and you didn't pass any attribs.

        NameError: If types cannot be resolved because of missing variables.

    Returns:
        *cls* so you can use this function also as a class decorator. Please
        note that you have to apply it **after** `attrs.define`. That means the
        decorator has to come in the line **before** `attrs.define`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*
    .. versionadded:: 23.1.0 *include_extras*
    """
    # Since calling get_type_hints is expensive we cache whether we've
    # done it already.
    if getattr(cls, "__attrs_types_resolved__", None) != cls:
        import typing

        kwargs = {
            "globalns": globalns,
            "localns": localns,
            "include_extras": include_extras,
        }

        hints = typing.get_type_hints(cls, **kwargs)
        for field in fields(cls) if attribs is None else attribs:
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _OBJ_SETATTR(field, "type", hints[field.name])
        # We store the class we resolved so that subclasses know they haven't
        # been resolved.
        cls.__attrs_types_resolved__ = cls

    # Return the class so you can use it as a decorator too.
    return cls
diff --git a/py311/lib/python3.11/site-packages/attr/_make.py b/py311/lib/python3.11/site-packages/attr/_make.py
new file mode 100644
index 0000000000000000000000000000000000000000..d24d9ba98575c3c4beb3186ca0335d2175663c7f
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_make.py
@@ -0,0 +1,3362 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+import abc
+import contextlib
+import copy
+import enum
+import inspect
+import itertools
+import linecache
+import sys
+import types
+import unicodedata
+import weakref
+
+from collections.abc import Callable, Mapping
+from functools import cached_property
+from typing import Any, NamedTuple, TypeVar
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+ PY_3_10_PLUS,
+ PY_3_11_PLUS,
+ PY_3_13_PLUS,
+ _AnnotationExtractor,
+ _get_annotations,
+ get_generic_base,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ UnannotatedAttributeError,
+)
+
+
# This is used at least twice, so cache it here.
_OBJ_SETATTR = object.__setattr__
# %-style pattern used to derive per-field factory variable names.
_INIT_FACTORY_PAT = "__attr_factory_%s"
# Textual prefixes an annotation string may start with to count as a ClassVar.
_CLASSVAR_PREFIXES = (
    "typing.ClassVar",
    "t.ClassVar",
    "ClassVar",
    "typing_extensions.ClassVar",
)
# we don't use a double-underscore prefix because that triggers
# name mangling when trying to create a slot for the field
# (when slots=True)
_HASH_CACHE_FIELD = "_attrs_cached_hash"

# Shared immutable mapping used whenever a field carries no metadata.
_EMPTY_METADATA_SINGLETON = types.MappingProxyType({})

# Unique object for unequivocal getattr() defaults.
_SENTINEL = object()

# Default on_setattr behavior: run converters first, then validators.
_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate)
+
+
class _Nothing(enum.Enum):
    """
    Sentinel to indicate the lack of a value when `None` is ambiguous.

    If extending attrs, you can use ``typing.Literal[NOTHING]`` to show
    that a value may be ``NOTHING``.

    .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
    .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant.
    """

    NOTHING = enum.auto()

    def __repr__(self):
        # A bare name reads better in messages than the default Enum repr.
        return "NOTHING"

    def __bool__(self):
        # Falsy, per the 21.1.0 change documented above.
        return False


NOTHING = _Nothing.NOTHING
"""
Sentinel to indicate the lack of a value when `None` is ambiguous.

When using in 3rd party code, use `attrs.NothingType` for type annotations.
"""
+
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since `None`
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008
+ return _none_constructor, _args
+
+
def attrib(
    default=NOTHING,
    validator=None,
    repr=True,
    cmp=None,
    hash=None,
    init=True,
    metadata=None,
    type=None,
    converter=None,
    factory=None,
    kw_only=None,
    eq=None,
    order=None,
    on_setattr=None,
    alias=None,
):
    """
    Create a new field / attribute on a class.

    Identical to `attrs.field`, except it's not keyword-only.

    Consider using `attrs.field` in new code (``attr.ib`` will *never* go away,
    though).

    .. warning::

        Does **nothing** unless the class is also decorated with
        `attr.s` (or similar)!


    .. versionadded:: 15.2.0 *convert*
    .. versionadded:: 16.3.0 *metadata*
    .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
    .. versionchanged:: 17.1.0
        *hash* is `None` and therefore mirrors *eq* by default.
    .. versionadded:: 17.3.0 *type*
    .. deprecated:: 17.4.0 *convert*
    .. versionadded:: 17.4.0
        *converter* as a replacement for the deprecated *convert* to achieve
        consistency with other noun-based arguments.
    .. versionadded:: 18.1.0
        ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionchanged:: 19.2.0 *convert* keyword argument removed.
    .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
    .. versionadded:: 19.2.0 *eq* and *order*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
    .. versionchanged:: 21.1.0
        *eq*, *order*, and *cmp* also accept a custom callable
    .. versionchanged:: 21.1.0 *cmp* undeprecated
    .. versionadded:: 22.2.0 *alias*
    .. versionchanged:: 25.4.0
        *kw_only* can now be None, and its default is also changed from False to
        None.
    """
    eq, eq_key, order, order_key = _determine_attrib_eq_order(
        cmp, eq, order, True
    )

    if hash is not None and hash is not True and hash is not False:
        msg = "Invalid value for hash. Must be True, False, or None."
        raise TypeError(msg)

    # *factory* is syntactic sugar for ``default=Factory(factory)``.
    if factory is not None:
        if default is not NOTHING:
            msg = (
                "The `default` and `factory` arguments are mutually exclusive."
            )
            raise ValueError(msg)
        if not callable(factory):
            msg = "The `factory` argument must be a callable."
            raise ValueError(msg)
        default = Factory(factory)

    if metadata is None:
        metadata = {}

    # Apply syntactic sugar by auto-wrapping.
    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    if validator and isinstance(validator, (list, tuple)):
        validator = and_(*validator)

    if converter and isinstance(converter, (list, tuple)):
        converter = pipe(*converter)

    return _CountingAttr(
        default=default,
        validator=validator,
        repr=repr,
        cmp=None,
        hash=hash,
        init=init,
        converter=converter,
        metadata=metadata,
        type=type,
        kw_only=kw_only,
        eq=eq,
        eq_key=eq_key,
        order=order,
        order_key=order_key,
        on_setattr=on_setattr,
        alias=alias,
    )
+
+
+def _compile_and_eval(
+ script: str,
+ globs: dict[str, Any] | None,
+ locs: Mapping[str, object] | None = None,
+ filename: str = "",
+) -> None:
+ """
+ Evaluate the script with the given global (globs) and local (locs)
+ variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
def _linecache_and_compile(
    script: str,
    filename: str,
    globs: dict[str, Any] | None,
    locals: Mapping[str, object] | None = None,
) -> dict[str, Any]:
    """
    Cache the script with _linecache_, compile it and return the _locals_.
    """

    locs = {} if locals is None else locals

    # In order for debuggers like PDB being able to step through the code,
    # we add a fake linecache entry.
    count = 1
    base_filename = filename
    while True:
        linecache_tuple = (
            len(script),
            None,
            script.splitlines(True),
            filename,
        )
        # setdefault only inserts when the key is free; if it returns a
        # different entry, another script already owns this filename, so
        # probe the next numbered variant.
        old_val = linecache.cache.setdefault(filename, linecache_tuple)
        if old_val == linecache_tuple:
            break

        # Strip the trailing ">" and append a counter: "<foo>" -> "<foo-1>".
        filename = f"{base_filename[:-1]}-{count}>"
        count += 1

    _compile_and_eval(script, globs, locs, filename)

    return locs
+
+
+def _make_attr_tuple_class(cls_name: str, attr_names: list[str]) -> type:
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = f"{cls_name}Attributes"
+ body = {}
+ for i, attr_name in enumerate(attr_names):
+
+ def getter(self, i=i):
+ return self[i]
+
+ body[attr_name] = property(getter)
+ return type(attr_class_name, (tuple,), body)
+
+
# Tuple class for extracted attributes from a class definition.
# `base_attrs` is a subset of `attrs`.
class _Attributes(NamedTuple):
    # All attributes of the class, wrapped in the per-class attr-tuple type.
    attrs: type
    # Attributes collected from base classes (subset of ``attrs``).
    base_attrs: list[Attribute]
    # Maps inherited attribute names to the base class defining them.
    base_attrs_map: dict[str, type]
+
+
def _is_class_var(annot):
    """
    Check whether *annot* is a typing.ClassVar.

    The string comparison hack is used to avoid evaluating all string
    annotations which would put attrs-based classes at a performance
    disadvantage compared to plain old classes.
    """
    annot = str(annot)

    # The annotation may itself be a quoted string literal; peel one layer
    # of quotes (of either kind) before matching.
    first, last = annot[:1], annot[-1:]
    if first in {"'", '"'} and last in {"'", '"'}:
        annot = annot[1:-1]

    return annot.startswith(_CLASSVAR_PREFIXES)
+
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+ """
+ return attrib_name in cls.__dict__
+
+
def _collect_base_attrs(
    cls, taken_attr_names
) -> tuple[list[Attribute], dict[str, type]]:
    """
    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
    """
    base_attrs = []
    base_attr_map = {}  # A dictionary of base attrs to their classes.

    # Traverse the MRO and collect attributes.
    for base_cls in reversed(cls.__mro__[1:-1]):
        for a in getattr(base_cls, "__attrs_attrs__", []):
            if a.inherited or a.name in taken_attr_names:
                continue

            a = a.evolve(inherited=True)  # noqa: PLW2901
            base_attrs.append(a)
            base_attr_map[a.name] = base_cls

    # For each name, only keep the freshest definition i.e. the furthest at the
    # back. base_attr_map is fine because it gets overwritten with every new
    # instance.
    filtered = []
    seen = set()
    # Walking from the back makes the last (freshest) definition win;
    # inserting at the front preserves the original relative order.
    for a in reversed(base_attrs):
        if a.name in seen:
            continue
        filtered.insert(0, a)
        seen.add(a.name)

    return filtered, base_attr_map
+
+
def _collect_base_attrs_broken(cls, taken_attr_names):
    """
    Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.

    N.B. *taken_attr_names* will be mutated.

    Adhere to the old incorrect behavior.

    Notably it collects from the front and considers inherited attributes which
    leads to the buggy behavior reported in #428.
    """
    # NOTE: intentionally buggy legacy behavior (see docstring); do NOT
    # "fix" this to match _collect_base_attrs.
    base_attrs = []
    base_attr_map = {}  # A dictionary of base attrs to their classes.

    # Traverse the MRO and collect attributes.
    for base_cls in cls.__mro__[1:-1]:
        for a in getattr(base_cls, "__attrs_attrs__", []):
            if a.name in taken_attr_names:
                continue

            a = a.evolve(inherited=True)  # noqa: PLW2901
            taken_attr_names.add(a.name)
            base_attrs.append(a)
            base_attr_map[a.name] = base_cls

    return base_attrs, base_attr_map
+
+
def _transform_attrs(
    cls,
    these,
    auto_attribs,
    kw_only,
    collect_by_mro,
    field_transformer,
) -> _Attributes:
    """
    Transform all `_CountingAttr`s on a class into `Attribute`s.

    If *these* is passed, use that and don't look for them on the class.

    If *collect_by_mro* is True, collect them in the correct MRO order,
    otherwise use the old -- incorrect -- order. See #428.

    Return an `_Attributes`.
    """
    cd = cls.__dict__
    anns = _get_annotations(cls)

    if these is not None:
        ca_list = list(these.items())
    elif auto_attribs is True:
        ca_names = {
            name
            for name, attr in cd.items()
            if attr.__class__ is _CountingAttr
        }
        ca_list = []
        annot_names = set()
        for attr_name, type in anns.items():
            if _is_class_var(type):
                continue
            annot_names.add(attr_name)
            a = cd.get(attr_name, NOTHING)

            # Annotated values that aren't attr.ib()s (including NOTHING for
            # bare annotations) are wrapped so the value becomes the default.
            if a.__class__ is not _CountingAttr:
                a = attrib(a)
            ca_list.append((attr_name, a))

        unannotated = ca_names - annot_names
        if unannotated:
            raise UnannotatedAttributeError(
                "The following `attr.ib`s lack a type annotation: "
                + ", ".join(
                    sorted(unannotated, key=lambda n: cd.get(n).counter)
                )
                + "."
            )
    else:
        # Definition order is recovered via each _CountingAttr's counter.
        ca_list = sorted(
            (
                (name, attr)
                for name, attr in cd.items()
                if attr.__class__ is _CountingAttr
            ),
            key=lambda e: e[1].counter,
        )

    fca = Attribute.from_counting_attr
    no = ClassProps.KeywordOnly.NO
    own_attrs = [
        fca(
            attr_name,
            ca,
            kw_only is not no,
            anns.get(attr_name),
        )
        for attr_name, ca in ca_list
    ]

    if collect_by_mro:
        base_attrs, base_attr_map = _collect_base_attrs(
            cls, {a.name for a in own_attrs}
        )
    else:
        base_attrs, base_attr_map = _collect_base_attrs_broken(
            cls, {a.name for a in own_attrs}
        )

    if kw_only is ClassProps.KeywordOnly.FORCE:
        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]

    attrs = base_attrs + own_attrs

    if field_transformer is not None:
        attrs = tuple(field_transformer(cls, attrs))

    # Check attr order after executing the field_transformer.
    # Mandatory vs non-mandatory attr order only matters when they are part of
    # the __init__ signature and when they aren't kw_only (which are moved to
    # the end and can be mandatory or non-mandatory in any order, as they will
    # be specified as keyword args anyway). Check the order of those attrs:
    had_default = False
    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
        if had_default is True and a.default is NOTHING:
            msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}"
            raise ValueError(msg)

        if had_default is False and a.default is not NOTHING:
            had_default = True

    # Resolve default field alias after executing field_transformer.
    # This allows field_transformer to differentiate between explicit vs
    # default aliases and supply their own defaults.
    for a in attrs:
        if not a.alias:
            # Evolve is very slow, so we hold our nose and do it dirty.
            _OBJ_SETATTR.__get__(a)("alias", _default_init_alias_for(a.name))

    # Create AttrsClass *after* applying the field_transformer since it may
    # add or remove attributes!
    attr_names = [a.name for a in attrs]
    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)

    return _Attributes(AttrsClass(attrs), base_attrs, base_attr_map)
+
+
def _make_cached_property_getattr(cached_properties, original_getattr, cls):
    """
    Generate a ``__getattr__`` that computes and stores *cached_properties*
    on first access, falling back to *original_getattr* (or the default
    attribute machinery when there is none) for all other names.
    """
    lines = [
        # Wrapped to get `__class__` into closure cell for super()
        # (It will be replaced with the newly constructed class after construction).
        "def wrapper(_cls):",
        "    __class__ = _cls",
        "    def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):",
        "        func = cached_properties.get(item)",
        "        if func is not None:",
        "            result = func(self)",
        "            _setter = _cached_setattr_get(self)",
        "            _setter(item, result)",
        "            return result",
    ]
    if original_getattr is not None:
        lines.append(
            "        return original_getattr(self, item)",
        )
    else:
        # NOTE(review): the final two generated lines below look unreachable
        # (both branches above them return or raise) -- kept as-is.
        lines.extend(
            [
                "        try:",
                "            return super().__getattribute__(item)",
                "        except AttributeError:",
                "            if not hasattr(super(), '__getattr__'):",
                "                raise",
                "            return super().__getattr__(item)",
                "        original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"",
                "        raise AttributeError(original_error)",
            ]
        )

    lines.extend(
        [
            "    return __getattr__",
            "__getattr__ = wrapper(_cls)",
        ]
    )

    unique_filename = _generate_unique_filename(cls, "getattr")

    glob = {
        "cached_properties": cached_properties,
        "_cached_setattr_get": _OBJ_SETATTR.__get__,
        "original_getattr": original_getattr,
    }

    return _linecache_and_compile(
        "\n".join(lines), unique_filename, glob, locals={"_cls": cls}
    )["__getattr__"]
+
+
+def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ "__traceback__",
+ "__suppress_context__",
+ "__notes__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ if isinstance(self, BaseException) and name in ("__notes__",):
+ BaseException.__delattr__(self, name)
+ return
+
+ raise FrozenInstanceError
+
+
def evolve(*args, **changes):
    """
    Create a new instance, based on the first positional argument with
    *changes* applied.

    .. tip::

       On Python 3.13 and later, you can also use `copy.replace` instead.

    Args:

        inst:
            Instance of a class with *attrs* attributes. *inst* must be passed
            as a positional argument.

        changes:
            Keyword changes in the new copy.

    Returns:
        A copy of inst with *changes* incorporated.

    Raises:
        TypeError:
            If *attr_name* couldn't be found in the class ``__init__``.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    .. versionadded:: 17.1.0
    .. deprecated:: 23.1.0
       It is now deprecated to pass the instance using the keyword argument
       *inst*. It will raise a warning until at least April 2024, after which
       it will become an error. Always pass the instance as a positional
       argument.
    .. versionchanged:: 24.1.0
       *inst* can't be passed as a keyword argument anymore.
    """
    if len(args) != 1:
        msg = (
            f"evolve() takes 1 positional argument, but {len(args)} were given"
        )
        raise TypeError(msg)
    (inst,) = args

    cls = inst.__class__
    # Fill every init-able field that the caller didn't override, keyed by its
    # __init__ alias (which may differ from the attribute name, e.g. for
    # private attributes).
    for a in fields(cls):
        if not a.init:
            continue
        if a.alias not in changes:
            changes[a.alias] = getattr(inst, a.name)

    return cls(**changes)
+
+
class _ClassBuilder:
    """
    Iteratively build *one* class.

    `__init__` transforms the attributes and precomputes all builder state;
    the ``add_*`` methods register generated dunder methods (most of them as
    script snippets that are compiled in one go); `build_class` finally
    materializes everything, either by patching the original class or by
    creating a new slotted one.
    """

    __slots__ = (
        "_add_method_dunders",
        "_attr_names",
        "_attrs",
        "_base_attr_map",
        "_base_names",
        "_cache_hash",
        "_cls",
        "_cls_dict",
        "_delete_attribs",
        "_frozen",
        "_has_custom_setattr",
        "_has_post_init",
        "_has_pre_init",
        "_is_exc",
        "_on_setattr",
        "_pre_init_has_args",
        "_repr_added",
        "_script_snippets",
        "_slots",
        "_weakref_slot",
        "_wrote_own_setattr",
    )

    def __init__(
        self,
        cls: type,
        these,
        auto_attribs: bool,
        props: ClassProps,
        has_custom_setattr: bool,
    ):
        attrs, base_attrs, base_map = _transform_attrs(
            cls,
            these,
            auto_attribs,
            props.kw_only,
            props.collected_fields_by_mro,
            props.field_transformer,
        )

        self._cls = cls
        # For slotted classes we start from a copy of the class dict because
        # we build a brand-new class; otherwise we only collect additions.
        self._cls_dict = dict(cls.__dict__) if props.is_slotted else {}
        self._attrs = attrs
        self._base_names = {a.name for a in base_attrs}
        self._base_attr_map = base_map
        self._attr_names = tuple(a.name for a in attrs)
        self._slots = props.is_slotted
        self._frozen = props.is_frozen
        self._weakref_slot = props.has_weakref_slot
        self._cache_hash = (
            props.hashability is ClassProps.Hashability.HASHABLE_CACHED
        )
        self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
        self._pre_init_has_args = False
        if self._has_pre_init:
            # Check if the pre init method has more arguments than just `self`
            # We want to pass arguments if pre init expects arguments
            pre_init_func = cls.__attrs_pre_init__
            pre_init_signature = inspect.signature(pre_init_func)
            self._pre_init_has_args = len(pre_init_signature.parameters) > 1
        self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
        self._delete_attribs = not bool(these)
        self._is_exc = props.is_exception
        self._on_setattr = props.on_setattr_hook

        self._has_custom_setattr = has_custom_setattr
        self._wrote_own_setattr = False

        self._cls_dict["__attrs_attrs__"] = self._attrs
        self._cls_dict["__attrs_props__"] = props

        if props.is_frozen:
            self._cls_dict["__setattr__"] = _frozen_setattrs
            self._cls_dict["__delattr__"] = _frozen_delattrs

            self._wrote_own_setattr = True
        elif self._on_setattr in (
            _DEFAULT_ON_SETATTR,
            setters.validate,
            setters.convert,
        ):
            has_validator = has_converter = False
            for a in attrs:
                if a.validator is not None:
                    has_validator = True
                if a.converter is not None:
                    has_converter = True

                if has_validator and has_converter:
                    break
            if (
                (
                    self._on_setattr == _DEFAULT_ON_SETATTR
                    and not (has_validator or has_converter)
                )
                or (self._on_setattr == setters.validate and not has_validator)
                or (self._on_setattr == setters.convert and not has_converter)
            ):
                # If class-level on_setattr is set to convert + validate, but
                # there's no field to convert or validate, pretend like there's
                # no on_setattr.
                self._on_setattr = None

        if props.added_pickling:
            (
                self._cls_dict["__getstate__"],
                self._cls_dict["__setstate__"],
            ) = self._make_getstate_setstate()

        # tuples of script, globs, hook
        self._script_snippets: list[
            tuple[str, dict, Callable[[dict, dict], Any]]
        ] = []
        self._repr_added = False

        # We want to only do this check once; in 99.9% of cases these
        # exist.
        if not hasattr(self._cls, "__module__") or not hasattr(
            self._cls, "__qualname__"
        ):
            self._add_method_dunders = self._add_method_dunders_safe
        else:
            self._add_method_dunders = self._add_method_dunders_unsafe

    def __repr__(self):
        return f"<_ClassBuilder(cls={self._cls.__name__})>"

    def _eval_snippets(self) -> None:
        """
        Evaluate any registered snippets in one go.
        """
        script = "\n".join([snippet[0] for snippet in self._script_snippets])
        globs = {}
        for _, snippet_globs, _ in self._script_snippets:
            globs.update(snippet_globs)

        locs = _linecache_and_compile(
            script,
            _generate_unique_filename(self._cls, "methods"),
            globs,
        )

        # Each snippet's hook attaches its compiled function(s) to the class
        # dict.
        for _, _, hook in self._script_snippets:
            hook(self._cls_dict, locs)

    def build_class(self):
        """
        Finalize class based on the accumulated configuration.

        Builder cannot be used after calling this method.
        """
        self._eval_snippets()
        if self._slots is True:
            cls = self._create_slots_class()
            self._cls.__attrs_base_of_slotted__ = weakref.ref(cls)
        else:
            cls = self._patch_original_class()
            if PY_3_10_PLUS:
                cls = abc.update_abstractmethods(cls)

        # The method gets only called if it's not inherited from a base class.
        # _has_own_attribute does NOT work properly for classmethods.
        if (
            getattr(cls, "__attrs_init_subclass__", None)
            and "__attrs_init_subclass__" not in cls.__dict__
        ):
            cls.__attrs_init_subclass__()

        return cls

    def _patch_original_class(self):
        """
        Apply accumulated methods and return the class.
        """
        cls = self._cls
        base_names = self._base_names

        # Clean class of attribute definitions (`attr.ib()`s).
        if self._delete_attribs:
            for name in self._attr_names:
                if (
                    name not in base_names
                    and getattr(cls, name, _SENTINEL) is not _SENTINEL
                ):
                    # An AttributeError can happen if a base class defines a
                    # class variable and we want to set an attribute with the
                    # same name by using only a type annotation.
                    with contextlib.suppress(AttributeError):
                        delattr(cls, name)

        # Attach our dunder methods.
        for name, value in self._cls_dict.items():
            setattr(cls, name, value)

        # If we've inherited an attrs __setattr__ and don't write our own,
        # reset it to object's.
        if not self._wrote_own_setattr and getattr(
            cls, "__attrs_own_setattr__", False
        ):
            cls.__attrs_own_setattr__ = False

            if not self._has_custom_setattr:
                cls.__setattr__ = _OBJ_SETATTR

        return cls

    def _create_slots_class(self):
        """
        Build and return a new class with a `__slots__` attribute.
        """
        cd = {
            k: v
            for k, v in self._cls_dict.items()
            if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
        }

        # 3.14.0rc2+
        if hasattr(sys, "_clear_type_descriptors"):
            sys._clear_type_descriptors(self._cls)

        # If our class doesn't have its own implementation of __setattr__
        # (either from the user or by us), check the bases, if one of them has
        # an attrs-made __setattr__, that needs to be reset. We don't walk the
        # MRO because we only care about our immediate base classes.
        # XXX: This can be confused by subclassing a slotted attrs class with
        # XXX: a non-attrs class and subclass the resulting class with an attrs
        # XXX: class.  See `test_slotted_confused` for details.  For now that's
        # XXX: OK with us.
        if not self._wrote_own_setattr:
            cd["__attrs_own_setattr__"] = False

            if not self._has_custom_setattr:
                for base_cls in self._cls.__bases__:
                    if base_cls.__dict__.get("__attrs_own_setattr__", False):
                        cd["__setattr__"] = _OBJ_SETATTR
                        break

        # Traverse the MRO to collect existing slots
        # and check for an existing __weakref__.
        existing_slots = {}
        weakref_inherited = False
        for base_cls in self._cls.__mro__[1:-1]:
            if base_cls.__dict__.get("__weakref__", None) is not None:
                weakref_inherited = True
            existing_slots.update(
                {
                    name: getattr(base_cls, name)
                    for name in getattr(base_cls, "__slots__", [])
                }
            )

        base_names = set(self._base_names)

        names = self._attr_names
        if (
            self._weakref_slot
            and "__weakref__" not in getattr(self._cls, "__slots__", ())
            and "__weakref__" not in names
            and not weakref_inherited
        ):
            names += ("__weakref__",)

        cached_properties = {
            name: cached_prop.func
            for name, cached_prop in cd.items()
            if isinstance(cached_prop, cached_property)
        }

        # Collect methods with a `__class__` reference that are shadowed in the new class.
        # To know to update them.
        additional_closure_functions_to_update = []
        if cached_properties:
            class_annotations = _get_annotations(self._cls)
            for name, func in cached_properties.items():
                # Add cached properties to names for slotting.
                names += (name,)
                # Clear out function from class to avoid clashing.
                del cd[name]
                additional_closure_functions_to_update.append(func)
                annotation = inspect.signature(func).return_annotation
                if annotation is not inspect.Parameter.empty:
                    class_annotations[name] = annotation

            original_getattr = cd.get("__getattr__")
            if original_getattr is not None:
                additional_closure_functions_to_update.append(original_getattr)

            cd["__getattr__"] = _make_cached_property_getattr(
                cached_properties, original_getattr, self._cls
            )

        # We only add the names of attributes that aren't inherited.
        # Setting __slots__ to inherited attributes wastes memory.
        slot_names = [name for name in names if name not in base_names]

        # There are slots for attributes from current class
        # that are defined in parent classes.
        # As their descriptors may be overridden by a child class,
        # we collect them here and update the class dict
        reused_slots = {
            slot: slot_descriptor
            for slot, slot_descriptor in existing_slots.items()
            if slot in slot_names
        }
        slot_names = [name for name in slot_names if name not in reused_slots]
        cd.update(reused_slots)
        if self._cache_hash:
            slot_names.append(_HASH_CACHE_FIELD)

        cd["__slots__"] = tuple(slot_names)

        cd["__qualname__"] = self._cls.__qualname__

        # Create new class based on old class and our methods.
        cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)

        # The following is a fix for methods that reference `__class__` or use
        # the zero-argument super() (the upstream issue link was lost from
        # this comment).
        # If a method mentions `__class__` or uses the no-arg super(), the
        # compiler will bake a reference to the class in the method itself
        # as `method.__closure__`.  Since we replace the class with a
        # clone, we rewrite these references so it keeps working.
        for item in itertools.chain(
            cls.__dict__.values(), additional_closure_functions_to_update
        ):
            if isinstance(item, (classmethod, staticmethod)):
                # Class- and staticmethods hide their functions inside.
                # These might need to be rewritten as well.
                closure_cells = getattr(item.__func__, "__closure__", None)
            elif isinstance(item, property):
                # Workaround for property `super()` shortcut (PY3-only).
                # There is no universal way for other descriptors.
                closure_cells = getattr(item.fget, "__closure__", None)
            else:
                closure_cells = getattr(item, "__closure__", None)

            if not closure_cells:  # Catch None or the empty list.
                continue
            for cell in closure_cells:
                try:
                    match = cell.cell_contents is self._cls
                except ValueError:  # noqa: PERF203
                    # ValueError: Cell is empty
                    pass
                else:
                    if match:
                        cell.cell_contents = cls
        return cls

    def add_repr(self, ns):
        """
        Register a generated ``__repr__`` (*ns* is the deprecated repr_ns
        namespace prefix, or None).
        """
        script, globs = _make_repr_script(self._attrs, ns)

        def _attach_repr(cls_dict, globs):
            cls_dict["__repr__"] = self._add_method_dunders(globs["__repr__"])

        self._script_snippets.append((script, globs, _attach_repr))
        self._repr_added = True
        return self

    def add_str(self):
        """
        Add a ``__str__`` that delegates to ``__repr__``; requires that a
        repr has been added first.
        """
        if not self._repr_added:
            msg = "__str__ can only be generated if a __repr__ exists."
            raise ValueError(msg)

        def __str__(self):
            return self.__repr__()

        self._cls_dict["__str__"] = self._add_method_dunders(__str__)
        return self

    def _make_getstate_setstate(self):
        """
        Create custom __setstate__ and __getstate__ methods.
        """
        # __weakref__ is not writable.
        state_attr_names = tuple(
            an for an in self._attr_names if an != "__weakref__"
        )

        def slots_getstate(self):
            """
            Automatically created by attrs.
            """
            return {name: getattr(self, name) for name in state_attr_names}

        hash_caching_enabled = self._cache_hash

        def slots_setstate(self, state):
            """
            Automatically created by attrs.
            """
            __bound_setattr = _OBJ_SETATTR.__get__(self)
            if isinstance(state, tuple):
                # Backward compatibility with attrs instances pickled with
                # attrs versions before v22.2.0 which stored tuples.
                for name, value in zip(state_attr_names, state):
                    __bound_setattr(name, value)
            else:
                for name in state_attr_names:
                    if name in state:
                        __bound_setattr(name, state[name])

            # The hash code cache is not included when the object is
            # serialized, but it still needs to be initialized to None to
            # indicate that the first call to __hash__ should be a cache
            # miss.
            if hash_caching_enabled:
                __bound_setattr(_HASH_CACHE_FIELD, None)

        return slots_getstate, slots_setstate

    def make_unhashable(self):
        """
        Mark the class unhashable by setting ``__hash__`` to None.
        """
        self._cls_dict["__hash__"] = None
        return self

    def add_hash(self):
        """
        Register a generated ``__hash__``.
        """
        script, globs = _make_hash_script(
            self._cls,
            self._attrs,
            frozen=self._frozen,
            cache_hash=self._cache_hash,
        )

        def attach_hash(cls_dict: dict, locs: dict) -> None:
            cls_dict["__hash__"] = self._add_method_dunders(locs["__hash__"])

        self._script_snippets.append((script, globs, attach_hash))

        return self

    def add_init(self):
        """
        Register a generated ``__init__``.
        """
        script, globs, annotations = _make_init_script(
            self._cls,
            self._attrs,
            self._has_pre_init,
            self._pre_init_has_args,
            self._has_post_init,
            self._frozen,
            self._slots,
            self._cache_hash,
            self._base_attr_map,
            self._is_exc,
            self._on_setattr,
            attrs_init=False,
        )

        def _attach_init(cls_dict, globs):
            init = globs["__init__"]
            init.__annotations__ = annotations
            cls_dict["__init__"] = self._add_method_dunders(init)

        self._script_snippets.append((script, globs, _attach_init))

        return self

    def add_replace(self):
        """
        Add a PEP 810-style ``__replace__`` (used by `copy.replace`) that
        delegates to `evolve`.
        """
        self._cls_dict["__replace__"] = self._add_method_dunders(
            lambda self, **changes: evolve(self, **changes)
        )
        return self

    def add_match_args(self):
        """
        Add ``__match_args__`` for structural pattern matching; only
        positional (non-kw-only) init attributes participate.

        NOTE(review): unlike the other add_* methods this does not return
        self.
        """
        self._cls_dict["__match_args__"] = tuple(
            field.name
            for field in self._attrs
            if field.init and not field.kw_only
        )

    def add_attrs_init(self):
        """
        Register a generated ``__attrs_init__`` (used when init=False).
        """
        script, globs, annotations = _make_init_script(
            self._cls,
            self._attrs,
            self._has_pre_init,
            self._pre_init_has_args,
            self._has_post_init,
            self._frozen,
            self._slots,
            self._cache_hash,
            self._base_attr_map,
            self._is_exc,
            self._on_setattr,
            attrs_init=True,
        )

        def _attach_attrs_init(cls_dict, globs):
            init = globs["__attrs_init__"]
            init.__annotations__ = annotations
            cls_dict["__attrs_init__"] = self._add_method_dunders(init)

        self._script_snippets.append((script, globs, _attach_attrs_init))

        return self

    def add_eq(self):
        """
        Register a generated ``__eq__`` and attach the shared ``__ne__``.
        """
        cd = self._cls_dict

        script, globs = _make_eq_script(self._attrs)

        def _attach_eq(cls_dict, globs):
            cls_dict["__eq__"] = self._add_method_dunders(globs["__eq__"])

        self._script_snippets.append((script, globs, _attach_eq))

        cd["__ne__"] = __ne__

        return self

    def add_order(self):
        """
        Attach the four ordering dunders generated by `_make_order`.
        """
        cd = self._cls_dict

        cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
            self._add_method_dunders(meth)
            for meth in _make_order(self._cls, self._attrs)
        )

        return self

    def add_setattr(self):
        """
        Attach a ``__setattr__`` that runs per-attribute on_setattr hooks;
        no-op if no attribute needs one.
        """
        sa_attrs = {}
        for a in self._attrs:
            on_setattr = a.on_setattr or self._on_setattr
            if on_setattr and on_setattr is not setters.NO_OP:
                sa_attrs[a.name] = a, on_setattr

        if not sa_attrs:
            return self

        if self._has_custom_setattr:
            # We need to write a __setattr__ but there already is one!
            msg = "Can't combine custom __setattr__ with on_setattr hooks."
            raise ValueError(msg)

        # docstring comes from _add_method_dunders
        def __setattr__(self, name, val):
            try:
                a, hook = sa_attrs[name]
            except KeyError:
                nval = val
            else:
                nval = hook(self, a, val)

            _OBJ_SETATTR(self, name, nval)

        self._cls_dict["__attrs_own_setattr__"] = True
        self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
        self._wrote_own_setattr = True

        return self

    def _add_method_dunders_unsafe(self, method: Callable) -> Callable:
        """
        Add __module__ and __qualname__ to a *method*.
        """
        method.__module__ = self._cls.__module__

        method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"

        method.__doc__ = (
            f"Method generated by attrs for class {self._cls.__qualname__}."
        )

        return method

    def _add_method_dunders_safe(self, method: Callable) -> Callable:
        """
        Add __module__ and __qualname__ to a *method* if possible.
        """
        with contextlib.suppress(AttributeError):
            method.__module__ = self._cls.__module__

        with contextlib.suppress(AttributeError):
            method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"

        with contextlib.suppress(AttributeError):
            method.__doc__ = f"Method generated by attrs for class {self._cls.__qualname__}."

        return method
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ msg = "Don't mix `cmp` with `eq' and `order`."
+ raise ValueError(msg)
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ msg = "`order` can only be True if `eq` is True too."
+ raise ValueError(msg)
+
+ return eq, order
+
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ msg = "Don't mix `cmp` with `eq' and `order`."
+ raise ValueError(msg)
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ msg = "`order` can only be True if `eq` is True too."
+ raise ValueError(msg)
+
+ return eq, eq_key, order, order_key
+
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+ *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
+ same as passed into @attr.s and *dunders* is a tuple of attribute names
+ whose presence signal that the user has implemented it themselves.
+
+ Return *default* if no reason for either for or against is found.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
+
def attrs(
    maybe_cls=None,
    these=None,
    repr_ns=None,
    repr=None,
    cmp=None,
    hash=None,
    init=None,
    slots=False,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=False,
    kw_only=False,
    cache_hash=False,
    auto_exc=False,
    eq=None,
    order=None,
    auto_detect=False,
    collect_by_mro=False,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
    unsafe_hash=None,
    force_kw_only=True,
):
    r"""
    A class decorator that adds :term:`dunder methods` according to the
    specified attributes using `attr.ib` or the *these* argument.

    Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will
    *never* go away, though).

    Args:
        repr_ns (str):
            When using nested classes, there was no way in Python 2 to
            automatically detect that. This argument allows to set a custom
            name for a more meaningful ``repr`` output. This argument is
            pointless in Python 3 and is therefore deprecated.

    .. caution::
        Refer to `attrs.define` for the rest of the parameters, but note that they
        can have different defaults.

        Notably, leaving *on_setattr* as `None` will **not** add any hooks.

    .. versionadded:: 16.0.0 *slots*
    .. versionadded:: 16.1.0 *frozen*
    .. versionadded:: 16.3.0 *str*
    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
    .. versionchanged:: 17.1.0
       *hash* supports `None` as value which is also the default now.
    .. versionadded:: 17.3.0 *auto_attribs*
    .. versionchanged:: 18.1.0
       If *these* is passed, no attributes are deleted from the class body.
    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
    .. versionadded:: 18.2.0 *weakref_slot*
    .. deprecated:: 18.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
       `DeprecationWarning` if the classes compared are subclasses of
       each other. ``__eq`` and ``__ne__`` never tried to compared subclasses
       to each other.
    .. versionchanged:: 19.2.0
       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
       subclasses comparable anymore.
    .. versionadded:: 18.2.0 *kw_only*
    .. versionadded:: 18.2.0 *cache_hash*
    .. versionadded:: 19.1.0 *auto_exc*
    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
    .. versionadded:: 19.2.0 *eq* and *order*
    .. versionadded:: 20.1.0 *auto_detect*
    .. versionadded:: 20.1.0 *collect_by_mro*
    .. versionadded:: 20.1.0 *getstate_setstate*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionadded:: 20.3.0 *field_transformer*
    .. versionchanged:: 21.1.0
       ``init=False`` injects ``__attrs_init__``
    .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
    .. versionchanged:: 21.1.0 *cmp* undeprecated
    .. versionadded:: 21.3.0 *match_args*
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    .. deprecated:: 24.1.0 *repr_ns*
    .. versionchanged:: 24.1.0
       Instances are not compared as tuples of attributes anymore, but using a
       big ``and`` condition. This is faster and has more correct behavior for
       uncomparable values like `math.nan`.
    .. versionadded:: 24.1.0
       If a class has an *inherited* classmethod called
       ``__attrs_init_subclass__``, it is executed after the class is created.
    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
    .. versionchanged:: 25.4.0
       *kw_only* now only applies to attributes defined in the current class,
       and respects attribute-level ``kw_only=False`` settings.
    .. versionadded:: 25.4.0 *force_kw_only*
    """
    if repr_ns is not None:
        import warnings

        warnings.warn(
            DeprecationWarning(
                "The `repr_ns` argument is deprecated and will be removed in or after August 2025."
            ),
            stacklevel=2,
        )

    eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)

    # unsafe_hash takes precedence due to PEP 681.
    if unsafe_hash is not None:
        hash = unsafe_hash

    # A list/tuple of hooks is combined into a single pipe of hooks.
    if isinstance(on_setattr, (list, tuple)):
        on_setattr = setters.pipe(*on_setattr)

    def wrap(cls):
        nonlocal hash
        is_frozen = frozen or _has_frozen_base_class(cls)
        is_exc = auto_exc is True and issubclass(cls, BaseException)
        has_own_setattr = auto_detect and _has_own_attribute(
            cls, "__setattr__"
        )

        if has_own_setattr and is_frozen:
            msg = "Can't freeze a class with a custom __setattr__."
            raise ValueError(msg)

        eq = not is_exc and _determine_whether_to_implement(
            cls, eq_, auto_detect, ("__eq__", "__ne__")
        )

        # Resolve the three-valued *hash* argument (True/False/None) plus
        # auto-detection into one Hashability state.
        Hashability = ClassProps.Hashability

        if is_exc:
            hashability = Hashability.LEAVE_ALONE
        elif hash is True:
            hashability = (
                Hashability.HASHABLE_CACHED
                if cache_hash
                else Hashability.HASHABLE
            )
        elif hash is False:
            hashability = Hashability.LEAVE_ALONE
        elif hash is None:
            if auto_detect is True and _has_own_attribute(cls, "__hash__"):
                hashability = Hashability.LEAVE_ALONE
            elif eq is True and is_frozen is True:
                hashability = (
                    Hashability.HASHABLE_CACHED
                    if cache_hash
                    else Hashability.HASHABLE
                )
            elif eq is False:
                hashability = Hashability.LEAVE_ALONE
            else:
                hashability = Hashability.UNHASHABLE
        else:
            msg = "Invalid value for hash.  Must be True, False, or None."
            raise TypeError(msg)

        KeywordOnly = ClassProps.KeywordOnly
        if kw_only:
            kwo = KeywordOnly.FORCE if force_kw_only else KeywordOnly.YES
        else:
            kwo = KeywordOnly.NO

        props = ClassProps(
            is_exception=is_exc,
            is_frozen=is_frozen,
            is_slotted=slots,
            collected_fields_by_mro=collect_by_mro,
            added_init=_determine_whether_to_implement(
                cls, init, auto_detect, ("__init__",)
            ),
            added_repr=_determine_whether_to_implement(
                cls, repr, auto_detect, ("__repr__",)
            ),
            added_eq=eq,
            added_ordering=not is_exc
            and _determine_whether_to_implement(
                cls,
                order_,
                auto_detect,
                ("__lt__", "__le__", "__gt__", "__ge__"),
            ),
            hashability=hashability,
            added_match_args=match_args,
            kw_only=kwo,
            has_weakref_slot=weakref_slot,
            added_str=str,
            added_pickling=_determine_whether_to_implement(
                cls,
                getstate_setstate,
                auto_detect,
                ("__getstate__", "__setstate__"),
                default=slots,
            ),
            on_setattr_hook=on_setattr,
            field_transformer=field_transformer,
        )

        if not props.is_hashable and cache_hash:
            msg = "Invalid value for cache_hash.  To use hash caching, hashing must be either explicitly or implicitly enabled."
            raise TypeError(msg)

        builder = _ClassBuilder(
            cls,
            these,
            auto_attribs=auto_attribs,
            props=props,
            has_custom_setattr=has_own_setattr,
        )

        if props.added_repr:
            builder.add_repr(repr_ns)

        if props.added_str:
            builder.add_str()

        if props.added_eq:
            builder.add_eq()
        if props.added_ordering:
            builder.add_order()

        if not frozen:
            builder.add_setattr()

        if props.is_hashable:
            builder.add_hash()
        elif props.hashability is Hashability.UNHASHABLE:
            builder.make_unhashable()

        # init=False still provides __attrs_init__ for manual invocation.
        if props.added_init:
            builder.add_init()
        else:
            builder.add_attrs_init()
            if cache_hash:
                msg = "Invalid value for cache_hash.  To use hash caching, init must be True."
                raise TypeError(msg)

        if PY_3_13_PLUS and not _has_own_attribute(cls, "__replace__"):
            builder.add_replace()

        if (
            PY_3_10_PLUS
            and match_args
            and not _has_own_attribute(cls, "__match_args__")
        ):
            builder.add_match_args()

        return builder.build_class()

    # maybe_cls's type depends on the usage of the decorator.  It's a class
    # if it's used as `@attrs` but `None` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap

    return wrap(maybe_cls)
+
+
# Some functions below take a parameter named *attrs*, which would shadow the
# decorator; they use this alias instead.
_attrs = attrs
"""
Internal alias so we can use it in functions that take an argument called
*attrs*.
"""
+
+
def _has_frozen_base_class(cls):
    """
    Check whether *cls* has a frozen ancestor by looking at its
    __setattr__.

    A frozen attrs class (or any of its subclasses) exposes the module-level
    `_frozen_setattrs` as its ``__setattr__``, so an identity check suffices.
    """
    inherited_setattr = cls.__setattr__
    return inherited_setattr is _frozen_setattrs
+
+
+def _generate_unique_filename(cls: type, func_name: str) -> str:
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ return (
+ f""
+ )
+
+
def _make_hash_script(
    cls: type, attrs: list[Attribute], frozen: bool, cache_hash: bool
) -> tuple[str, dict]:
    """
    Build the source of a ``__hash__`` for *cls* and the globals it needs.

    Only attributes with ``hash=True`` — or ``hash=None`` combined with
    ``eq=True`` — participate.  A per-class constant (the hash of the
    generated filename) is mixed in so that instances of different classes
    with equal fields hash differently.  With *cache_hash*, the computed
    value is stored on the instance (via ``object.__setattr__`` when
    *frozen*) and reused.
    """
    attrs = tuple(
        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
    )

    tab = "        "

    type_hash = hash(_generate_unique_filename(cls, "hash"))
    # If eq is custom generated, we need to include the functions in globs
    globs = {}

    hash_def = "def __hash__(self"
    hash_func = "hash(("
    closing_braces = "))"
    if not cache_hash:
        hash_def += "):"
    else:
        # Keyword-only `_cache_wrapper` default smuggles the wrapper class in
        # without polluting the signature for positional callers.
        hash_def += ", *"

        hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
        hash_func = "_cache_wrapper(" + hash_func
        closing_braces += ")"

    method_lines = [hash_def]

    def append_hash_computation_lines(prefix, indent):
        """
        Generate the code for actually computing the hash code.
        Below this will either be returned directly or used to compute
        a value which is then cached, depending on the value of cache_hash
        """

        method_lines.extend(
            [
                indent + prefix + hash_func,
                indent + f"        {type_hash},",
            ]
        )

        for a in attrs:
            if a.eq_key:
                cmp_name = f"_{a.name}_key"
                globs[cmp_name] = a.eq_key
                method_lines.append(
                    indent + f"        {cmp_name}(self.{a.name}),"
                )
            else:
                method_lines.append(indent + f"        self.{a.name},")

        method_lines.append(indent + "    " + closing_braces)

    if cache_hash:
        # Compute lazily on first call, then always serve the cached value.
        method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:")
        if frozen:
            append_hash_computation_lines(
                f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2
            )
            method_lines.append(tab * 2 + ")")  # close __setattr__
        else:
            append_hash_computation_lines(
                f"self.{_HASH_CACHE_FIELD} = ", tab * 2
            )
        method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}")
    else:
        append_hash_computation_lines("return ", tab)

    script = "\n".join(method_lines)
    return script, globs
+
+
def _add_hash(cls: type, attrs: list[Attribute]):
    """
    Add a hash method to *cls*.

    Compiles an uncached, unfrozen ``__hash__`` from *attrs* and attaches it
    directly to the class.
    """
    filename = _generate_unique_filename(cls, "__hash__")
    script, globs = _make_hash_script(
        cls, attrs, frozen=False, cache_hash=False
    )
    _compile_and_eval(script, globs, filename=filename)
    cls.__hash__ = globs["__hash__"]
    return cls
+
+
+def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+
+def _make_eq_script(attrs: list) -> tuple[str, dict]:
+ """
+ Create __eq__ method for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.eq]
+
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+
+ globs = {}
+ if attrs:
+ lines.append(" return (")
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = f"_{a.name}_key"
+ # Add the key function to the global namespace
+ # of the evaluated function.
+ globs[cmp_name] = a.eq_key
+ lines.append(
+ f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})"
+ )
+ else:
+ lines.append(f" self.{a.name} == other.{a.name}")
+ if a is not attrs[-1]:
+ lines[-1] = f"{lines[-1]} and"
+ lines.append(" )")
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+
+ return script, globs
+
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
def _add_eq(cls, attrs=None):
    """
    Attach attrs-generated ``__eq__`` and ``__ne__`` methods to *cls* and
    return the class.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    source, namespace = _make_eq_script(attrs)
    unique_filename = _generate_unique_filename(cls, "__eq__")
    _compile_and_eval(source, namespace, filename=unique_filename)
    cls.__eq__ = namespace["__eq__"]
    # __ne__ is the shared negate-__eq__ helper defined at module level.
    cls.__ne__ = __ne__

    return cls
+
+
def _make_repr_script(attrs, ns) -> tuple[str, dict]:
    """
    Build the source of a ``__repr__`` method over *attrs* plus the globals
    the generated code needs. *ns* optionally overrides the namespace part
    of the printed class name.
    """
    # One entry per shown attribute: (name, formatter, part-of-__init__).
    # a.repr is either True (use builtin repr) or a custom callable.
    selected = tuple(
        (a.name, repr if a.repr is True else a.repr, a.init)
        for a in attrs
        if a.repr is not False
    )

    globs = {}
    for name, formatter, _ in selected:
        if formatter != repr:
            # Custom formatters get injected under a per-attribute name.
            globs[name + "_repr"] = formatter
    globs["_compat"] = _compat
    globs["AttributeError"] = AttributeError
    globs["NOTHING"] = NOTHING

    fragments = []
    for name, formatter, from_init in selected:
        if from_init:
            accessor = "self." + name
        else:
            # Non-init attributes may be unset; fall back to NOTHING.
            accessor = 'getattr(self, "' + name + '", NOTHING)'
        if formatter == repr:
            fragments.append("%s={%s!r}" % (name, accessor))
        else:
            fragments.append("%s={%s_repr(%s)}" % (name, name, accessor))
    repr_fragment = ", ".join(fragments)

    if ns is None:
        cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
    else:
        cls_name_fragment = ns + ".{self.__class__.__name__}"

    # The already_repring set guards against infinite recursion on cyclic
    # object graphs by printing '...' for objects already being repr'd.
    lines = [
        "def __repr__(self):",
        "    try:",
        "        already_repring = _compat.repr_context.already_repring",
        "    except AttributeError:",
        "        already_repring = {id(self),}",
        "        _compat.repr_context.already_repring = already_repring",
        "    else:",
        "        if id(self) in already_repring:",
        "            return '...'",
        "        else:",
        "            already_repring.add(id(self))",
        "    try:",
        f"        return f'{cls_name_fragment}({repr_fragment})'",
        "    finally:",
        "        already_repring.remove(id(self))",
    ]

    return "\n".join(lines), globs
+
+
def _add_repr(cls, ns=None, attrs=None):
    """
    Attach an attrs-generated ``__repr__`` to *cls* and return the class.
    """
    if attrs is None:
        attrs = cls.__attrs_attrs__

    source, namespace = _make_repr_script(attrs, ns)
    unique_filename = _generate_unique_filename(cls, "__repr__")
    _compile_and_eval(source, namespace, filename=unique_filename)
    cls.__repr__ = namespace["__repr__"]
    return cls
+
+
def fields(cls):
    """
    Return the tuple of *attrs* attributes for a class.

    The tuple also allows accessing the fields by their names (see below for
    examples).

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        tuple (with name accessors) of `attrs.Attribute`

    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
       by name.
    .. versionchanged:: 23.1.0 Add support for generic classes.
    """
    generic_base = get_generic_base(cls)

    if generic_base is None and not isinstance(cls, type):
        msg = "Passed object must be a class."
        raise TypeError(msg)

    attrs = getattr(cls, "__attrs_attrs__", None)

    if attrs is not None:
        return attrs

    # Not decorated directly -- maybe it's a parametrized generic alias of
    # an attrs class.
    if generic_base is not None:
        attrs = getattr(generic_base, "__attrs_attrs__", None)
        if attrs is not None:
            # Even though this is global state, stick it on here to speed
            # it up. We rely on `cls` being cached for this to be
            # efficient.
            cls.__attrs_attrs__ = attrs
            return attrs

    msg = f"{cls!r} is not an attrs-decorated class."
    raise NotAnAttrsClassError(msg)
+
+
def fields_dict(cls):
    """
    Return an ordered dictionary of *attrs* attributes for a class, whose
    keys are the attribute names.

    Args:
        cls (type): Class to introspect.

    Raises:
        TypeError: If *cls* is not a class.

        attrs.exceptions.NotAnAttrsClassError:
            If *cls* is not an *attrs* class.

    Returns:
        dict[str, attrs.Attribute]: Dict of attribute name to definition

    .. versionadded:: 18.1.0
    """
    if not isinstance(cls, type):
        msg = "Passed object must be a class."
        raise TypeError(msg)

    attributes = getattr(cls, "__attrs_attrs__", None)
    if attributes is None:
        msg = f"{cls!r} is not an attrs-decorated class."
        raise NotAnAttrsClassError(msg)

    return {field.name: field for field in attributes}
+
+
def validate(inst):
    """
    Run every field validator defined on *inst*'s class.

    Leaves all exceptions through.

    Args:
        inst: Instance of a class with *attrs* attributes.
    """
    # Validators can be disabled globally via attr.validators.set_disabled().
    if _config._run_validators is False:
        return

    for field in fields(inst.__class__):
        validator = field.validator
        if validator is not None:
            validator(inst, field, getattr(inst, field.name))
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ cls = base_attr_map.get(a_name)
+ return cls and "__slots__" in cls.__dict__
+
+
def _make_init_script(
    cls,
    attrs,
    pre_init,
    pre_init_has_args,
    post_init,
    frozen,
    slots,
    cache_hash,
    base_attr_map,
    is_exc,
    cls_on_setattr,
    attrs_init,
) -> tuple[str, dict, dict]:
    """
    Build the source, globals, and annotations of an ``__init__`` (or
    ``__attrs_init__`` if *attrs_init*) method for *cls*.
    """
    has_cls_on_setattr = (
        cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
    )

    if frozen and has_cls_on_setattr:
        msg = "Frozen classes can't use on_setattr."
        raise ValueError(msg)

    # Hash caching and frozen classes both write through the cached
    # object.__setattr__.
    needs_cached_setattr = cache_hash or frozen

    init_attrs = []
    attr_dict = {}
    for a in attrs:
        # Attributes without init and without a default never get touched
        # by the generated method.
        if not a.init and a.default is NOTHING:
            continue

        init_attrs.append(a)
        attr_dict[a.name] = a

        if a.on_setattr is not None:
            if frozen is True:
                msg = "Frozen classes can't use on_setattr."
                raise ValueError(msg)
            needs_cached_setattr = True
        elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
            needs_cached_setattr = True

    method_name = "__attrs_init__" if attrs_init else "__init__"
    script, globs, annotations = _attrs_to_init_script(
        init_attrs,
        frozen,
        slots,
        pre_init,
        pre_init_has_args,
        post_init,
        cache_hash,
        base_attr_map,
        is_exc,
        needs_cached_setattr,
        has_cls_on_setattr,
        method_name,
    )

    if cls.__module__ in sys.modules:
        # This makes typing.get_type_hints(CLS.__init__) resolve string
        # types.
        globs.update(sys.modules[cls.__module__].__dict__)

    globs["NOTHING"] = NOTHING
    globs["attr_dict"] = attr_dict

    if needs_cached_setattr:
        # Save the lookup overhead in __init__ if we need to circumvent
        # setattr hooks.
        globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__

    return script, globs, annotations
+
+
+def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str:
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return f"_setattr('{attr_name}', {value_var})"
+
+
+def _setattr_with_converter(
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
+ its converter first.
+ """
+ return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})"
+
+
+def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str:
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+ relegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return f"self.{attr_name} = {value}"
+
+
+def _assign_with_converter(
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
+ conversion. Otherwise relegate to _setattr_with_converter.
+ """
+ if has_on_setattr:
+ return _setattr_with_converter(attr_name, value_var, True, converter)
+
+ return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}"
+
+
def _determine_setters(
    frozen: bool, slots: bool, base_attr_map: dict[str, type]
):
    """
    Pick the statement-formatting helpers used by the generated ``__init__``,
    based on whether the class is frozen and/or slotted.

    Returns a tuple of (extra setup lines, setter, converting setter).
    """
    if frozen is not True:
        # Not frozen -- we can just assign directly.
        return (), _assign, _assign_with_converter

    if slots is True:
        # Frozen slotted classes must always go through object.__setattr__.
        return (), _setattr, _setattr_with_converter

    # Dict frozen classes assign directly to __dict__.
    # But only if the attribute doesn't come from an ancestor slot
    # class.
    # Note _inst_dict will be used again below if cache_hash is True

    def fmt_setter(
        attr_name: str, value_var: str, has_on_setattr: bool
    ) -> str:
        if _is_slot_attr(attr_name, base_attr_map):
            return _setattr(attr_name, value_var, has_on_setattr)
        return f"_inst_dict['{attr_name}'] = {value_var}"

    def fmt_setter_with_converter(
        attr_name: str,
        value_var: str,
        has_on_setattr: bool,
        converter: Converter,
    ) -> str:
        if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
            return _setattr_with_converter(
                attr_name, value_var, has_on_setattr, converter
            )
        return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}"

    return (
        ("_inst_dict = self.__dict__",),
        fmt_setter,
        fmt_setter_with_converter,
    )
+
+
def _attrs_to_init_script(
    attrs: list[Attribute],
    is_frozen: bool,
    is_slotted: bool,
    call_pre_init: bool,
    pre_init_has_args: bool,
    call_post_init: bool,
    does_cache_hash: bool,
    base_attr_map: dict[str, type],
    is_exc: bool,
    needs_cached_setattr: bool,
    has_cls_on_setattr: bool,
    method_name: str,
) -> tuple[str, dict, dict]:
    """
    Return a script of an initializer for *attrs*, a dict of globals, and
    annotations for the initializer.

    The globals are required by the generated script.

    The script is assembled line by line: per-attribute assignment
    statements, then validator calls, post-init hook, hash-cache reset, and
    exception base-class initialization, finally wrapped in a ``def``.
    """
    lines = ["self.__attrs_pre_init__()"] if call_pre_init else []

    if needs_cached_setattr:
        lines.append(
            # Circumvent the __setattr__ descriptor to save one lookup per
            # assignment. Note _setattr will be used again below if
            # does_cache_hash is True.
            "_setattr = _cached_setattr_get(self)"
        )

    # Pick assignment formatters matching the frozen/slotted combination.
    extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters(
        is_frozen, is_slotted, base_attr_map
    )
    lines.extend(extra_lines)

    args = []  # Parameters in the definition of __init__
    pre_init_args = []  # Parameters in the call to __attrs_pre_init__
    kw_only_args = []  # Used for both 'args' and 'pre_init_args' above
    attrs_to_validate = []

    # This is a dictionary of names to validator and converter callables.
    # Injecting this into __init__ globals lets us avoid lookups.
    names_for_globals = {}
    annotations = {"return": None}

    for a in attrs:
        if a.validator:
            attrs_to_validate.append(a)

        attr_name = a.name
        has_on_setattr = a.on_setattr is not None or (
            a.on_setattr is not setters.NO_OP and has_cls_on_setattr
        )
        # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not
        # explicitly provided
        arg_name = a.alias

        has_factory = isinstance(a.default, Factory)
        maybe_self = "self" if has_factory and a.default.takes_self else ""

        # Normalize plain converter callables to a Converter so the
        # formatting helpers only deal with one shape.
        if a.converter is not None and not isinstance(a.converter, Converter):
            converter = Converter(a.converter)
        else:
            converter = a.converter

        if a.init is False:
            # Not part of the signature: assign the default/factory result
            # directly in the method body.
            if has_factory:
                init_factory_name = _INIT_FACTORY_PAT % (a.name,)
                if converter is not None:
                    lines.append(
                        fmt_setter_with_converter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                            converter,
                        )
                    )
                    names_for_globals[converter._get_global_name(a.name)] = (
                        converter.converter
                    )
                else:
                    lines.append(
                        fmt_setter(
                            attr_name,
                            init_factory_name + f"({maybe_self})",
                            has_on_setattr,
                        )
                    )
                names_for_globals[init_factory_name] = a.default.factory
            elif converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    fmt_setter(
                        attr_name,
                        f"attr_dict['{attr_name}'].default",
                        has_on_setattr,
                    )
                )
        elif a.default is not NOTHING and not has_factory:
            # Plain default: it becomes the parameter's default value.
            arg = f"{arg_name}=attr_dict['{attr_name}'].default"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)
                pre_init_args.append(arg_name)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        elif has_factory:
            # Factory default: NOTHING is the sentinel; the factory is only
            # called at run time if the argument wasn't passed.
            arg = f"{arg_name}=NOTHING"
            if a.kw_only:
                kw_only_args.append(arg)
            else:
                args.append(arg)
                pre_init_args.append(arg_name)
            lines.append(f"if {arg_name} is not NOTHING:")

            init_factory_name = _INIT_FACTORY_PAT % (a.name,)
            if converter is not None:
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter_with_converter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                        converter,
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(
                    "    " + fmt_setter(attr_name, arg_name, has_on_setattr)
                )
                lines.append("else:")
                lines.append(
                    "    "
                    + fmt_setter(
                        attr_name,
                        init_factory_name + "(" + maybe_self + ")",
                        has_on_setattr,
                    )
                )
                names_for_globals[init_factory_name] = a.default.factory
        else:
            # Mandatory parameter without a default.
            if a.kw_only:
                kw_only_args.append(arg_name)
            else:
                args.append(arg_name)
                pre_init_args.append(arg_name)

            if converter is not None:
                lines.append(
                    fmt_setter_with_converter(
                        attr_name, arg_name, has_on_setattr, converter
                    )
                )
                names_for_globals[converter._get_global_name(a.name)] = (
                    converter.converter
                )
            else:
                lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))

        if a.init is True:
            if a.type is not None and converter is None:
                annotations[arg_name] = a.type
            elif converter is not None and converter._first_param_type:
                # Use the type from the converter if present.
                annotations[arg_name] = converter._first_param_type

    if attrs_to_validate:  # we can skip this if there are no validators.
        names_for_globals["_config"] = _config
        lines.append("if _config._run_validators is True:")
        for a in attrs_to_validate:
            val_name = "__attr_validator_" + a.name
            attr_name = "__attr_" + a.name
            lines.append(f"    {val_name}(self, {attr_name}, self.{a.name})")
            names_for_globals[val_name] = a.validator
            names_for_globals[attr_name] = a

    if call_post_init:
        lines.append("self.__attrs_post_init__()")

    # Because this is set only after __attrs_post_init__ is called, a crash
    # will result if post-init tries to access the hash code. This seemed
    # preferable to setting this beforehand, in which case alteration to field
    # values during post-init combined with post-init accessing the hash code
    # would result in silent bugs.
    if does_cache_hash:
        if is_frozen:
            if is_slotted:
                init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)"
            else:
                init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None"
        else:
            init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None"
        lines.append(init_hash_cache)

    # For exceptions we rely on BaseException.__init__ for proper
    # initialization.
    if is_exc:
        vals = ",".join(f"self.{a.name}" for a in attrs if a.init)

        lines.append(f"BaseException.__init__(self, {vals})")

    args = ", ".join(args)
    pre_init_args = ", ".join(pre_init_args)
    if kw_only_args:
        # leading comma & kw_only args
        args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}"
        pre_init_kw_only_args = ", ".join(
            [
                f"{kw_arg_name}={kw_arg_name}"
                # We need to remove the defaults from the kw_only_args.
                for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args)
            ]
        )
        pre_init_args += ", " if pre_init_args else ""
        pre_init_args += pre_init_kw_only_args

    if call_pre_init and pre_init_has_args:
        # If pre init method has arguments, pass the values given to __init__.
        lines[0] = f"self.__attrs_pre_init__({pre_init_args})"

    # Python <3.12 doesn't allow backslashes in f-strings.
    NL = "\n    "
    return (
        f"""def {method_name}(self, {args}):
    {NL.join(lines) if lines else "pass"}
""",
        names_for_globals,
        annotations,
    )
+
+
+def _default_init_alias_for(name: str) -> str:
+ """
+ The default __init__ parameter name for a field.
+
+ This performs private-name adjustment via leading-unscore stripping,
+ and is the default value of Attribute.alias if not provided.
+ """
+
+ return name.lstrip("_")
+
+
class Attribute:
    """
    *Read-only* representation of an attribute.

    .. warning::

        You should never instantiate this class yourself.

    The class has *all* arguments of `attr.ib` (except for ``factory`` which is
    only syntactic sugar for ``default=Factory(...)`` plus the following:

    - ``name`` (`str`): The name of the attribute.
    - ``alias`` (`str`): The __init__ parameter name of the attribute, after
      any explicit overrides and default private-attribute-name handling.
    - ``inherited`` (`bool`): Whether or not that attribute has been inherited
      from a base class.
    - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The
      callables that are used for comparing and ordering objects by this
      attribute, respectively. These are set by passing a callable to
      `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also
      :ref:`comparison customization <custom-comparison>`.

    Instances of this class are frequently used for introspection purposes
    like:

    - `fields` returns a tuple of them.
    - Validators get them passed as the first argument.
    - The :ref:`field transformer <transform-fields>` hook receives a list of
      them.
    - The ``alias`` property exposes the __init__ parameter name of the field,
      with any overrides and default private-attribute handling applied.


    .. versionadded:: 20.1.0 *inherited*
    .. versionadded:: 20.1.0 *on_setattr*
    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
        equality checks and hashing anymore.
    .. versionadded:: 21.1.0 *eq_key* and *order_key*
    .. versionadded:: 22.2.0 *alias*

    For the full version history of the fields, see `attr.ib`.
    """

    # These slots must NOT be reordered because we use them later for
    # instantiation.
    __slots__ = (  # noqa: RUF023
        "name",
        "default",
        "validator",
        "repr",
        "eq",
        "eq_key",
        "order",
        "order_key",
        "hash",
        "init",
        "metadata",
        "type",
        "converter",
        "kw_only",
        "inherited",
        "on_setattr",
        "alias",
    )

    def __init__(
        self,
        name,
        default,
        validator,
        repr,
        cmp,  # XXX: unused, remove along with other cmp code.
        hash,
        init,
        inherited,
        metadata=None,
        type=None,
        converter=None,
        kw_only=False,
        eq=None,
        eq_key=None,
        order=None,
        order_key=None,
        on_setattr=None,
        alias=None,
    ):
        # Resolve the legacy cmp argument plus any eq/order key callables
        # into the final eq/order configuration.
        eq, eq_key, order, order_key = _determine_attrib_eq_order(
            cmp, eq_key or eq, order_key or order, True
        )

        # Cache this descriptor here to speed things up later.
        bound_setattr = _OBJ_SETATTR.__get__(self)

        # Despite the big red warning, people *do* instantiate `Attribute`
        # themselves.
        bound_setattr("name", name)
        bound_setattr("default", default)
        bound_setattr("validator", validator)
        bound_setattr("repr", repr)
        bound_setattr("eq", eq)
        bound_setattr("eq_key", eq_key)
        bound_setattr("order", order)
        bound_setattr("order_key", order_key)
        bound_setattr("hash", hash)
        bound_setattr("init", init)
        bound_setattr("converter", converter)
        bound_setattr(
            "metadata",
            (
                types.MappingProxyType(dict(metadata))  # Shallow copy
                if metadata
                else _EMPTY_METADATA_SINGLETON
            ),
        )
        bound_setattr("type", type)
        bound_setattr("kw_only", kw_only)
        bound_setattr("inherited", inherited)
        bound_setattr("on_setattr", on_setattr)
        bound_setattr("alias", alias)

    def __setattr__(self, name, value):
        # Instances are read-only; internal writes go through the cached
        # object.__setattr__ (see bound_setattr above) instead.
        raise FrozenInstanceError

    @classmethod
    def from_counting_attr(
        cls, name: str, ca: _CountingAttr, kw_only: bool, type=None
    ):
        """
        Build an `Attribute` from the `_CountingAttr` *ca* collected on the
        class body.
        """
        # The 'kw_only' argument is the class-level setting, and is used if the
        # attribute itself does not explicitly set 'kw_only'.
        # type holds the annotated value. deal with conflicts:
        if type is None:
            type = ca.type
        elif ca.type is not None:
            msg = f"Type annotation and type argument cannot both be present for '{name}'."
            raise ValueError(msg)
        return cls(
            name,
            ca._default,
            ca._validator,
            ca.repr,
            None,  # cmp is legacy and always passed as None here.
            ca.hash,
            ca.init,
            False,  # inherited: freshly collected, so not from a base class.
            ca.metadata,
            type,
            ca.converter,
            kw_only if ca.kw_only is None else ca.kw_only,
            ca.eq,
            ca.eq_key,
            ca.order,
            ca.order_key,
            ca.on_setattr,
            ca.alias,
        )

    # Don't use attrs.evolve since fields(Attribute) doesn't work
    def evolve(self, **changes):
        """
        Copy *self* and apply *changes*.

        This works similarly to `attrs.evolve` but that function does not work
        with :class:`attrs.Attribute`.

        It is mainly meant to be used for `transform-fields`.

        .. versionadded:: 20.3.0
        """
        new = copy.copy(self)

        new._setattrs(changes.items())

        return new

    # Don't use _add_pickle since fields(Attribute) doesn't work
    def __getstate__(self):
        """
        Play nice with pickle.
        """
        # metadata is a MappingProxyType which can't be pickled; ship a
        # plain dict instead.
        return tuple(
            getattr(self, name) if name != "metadata" else dict(self.metadata)
            for name in self.__slots__
        )

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        self._setattrs(zip(self.__slots__, state))

    def _setattrs(self, name_values_pairs):
        # Write attributes despite immutability, re-wrapping metadata in a
        # read-only proxy.
        bound_setattr = _OBJ_SETATTR.__get__(self)
        for name, value in name_values_pairs:
            if name != "metadata":
                bound_setattr(name, value)
            else:
                bound_setattr(
                    name,
                    (
                        types.MappingProxyType(dict(value))
                        if value
                        else _EMPTY_METADATA_SINGLETON
                    ),
                )
+
+
# Bootstrap: Attribute is itself described by Attribute instances (one per
# slot) so the attrs-generated dunders below can be applied to it.
_a = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=(name != "metadata"),
        init=True,
        inherited=False,
        alias=_default_init_alias_for(name),
    )
    for name in Attribute.__slots__
]

# `inherited` is deliberately excluded from __eq__ and __hash__, and only
# fields marked hashable participate in __hash__.
Attribute = _add_hash(
    _add_eq(
        _add_repr(Attribute, attrs=_a),
        attrs=[a for a in _a if a.name != "inherited"],
    ),
    attrs=[a for a in _a if a.hash and a.name != "inherited"],
)
+
+
class _CountingAttr:
    """
    Intermediate representation of attributes that uses a counter to preserve
    the order in which the attributes have been defined.

    *Internal* data structure of the attrs library. Running into one is most
    likely the result of a bug like a forgotten `@attr.s` decorator.
    """

    __slots__ = (
        "_default",
        "_validator",
        "alias",
        "converter",
        "counter",
        "eq",
        "eq_key",
        "hash",
        "init",
        "kw_only",
        "metadata",
        "on_setattr",
        "order",
        "order_key",
        "repr",
        "type",
    )
    # Handwritten field descriptions (instead of decorator-generated ones)
    # because this class bootstraps the machinery; consumed by the
    # _add_eq/_add_repr call below the class.
    __attrs_attrs__ = (
        *tuple(
            Attribute(
                name=name,
                alias=_default_init_alias_for(name),
                default=NOTHING,
                validator=None,
                repr=True,
                cmp=None,
                hash=True,
                init=True,
                kw_only=False,
                eq=True,
                eq_key=None,
                order=False,
                order_key=None,
                inherited=False,
                on_setattr=None,
            )
            for name in (
                "counter",
                "_default",
                "repr",
                "eq",
                "order",
                "hash",
                "init",
                "on_setattr",
                "alias",
            )
        ),
        Attribute(
            name="metadata",
            alias="metadata",
            default=None,
            validator=None,
            repr=True,
            cmp=None,
            hash=False,
            init=True,
            kw_only=False,
            eq=True,
            eq_key=None,
            order=False,
            order_key=None,
            inherited=False,
            on_setattr=None,
        ),
    )
    # Class-wide counter: incremented on every field definition so the
    # definition order within a class body can be reconstructed.
    cls_counter = 0

    def __init__(
        self,
        default,
        validator,
        repr,
        cmp,
        hash,
        init,
        converter,
        metadata,
        type,
        kw_only,
        eq,
        eq_key,
        order,
        order_key,
        on_setattr,
        alias,
    ):
        _CountingAttr.cls_counter += 1
        self.counter = _CountingAttr.cls_counter
        self._default = default
        self._validator = validator
        self.converter = converter
        self.repr = repr
        self.eq = eq
        self.eq_key = eq_key
        self.order = order
        self.order_key = order_key
        self.hash = hash
        self.init = init
        self.metadata = metadata
        self.type = type
        self.kw_only = kw_only
        self.on_setattr = on_setattr
        self.alias = alias

    def validator(self, meth):
        """
        Decorator that adds *meth* to the list of validators.

        Returns *meth* unchanged.

        .. versionadded:: 17.1.0
        """
        if self._validator is None:
            self._validator = meth
        else:
            # Chain with any previously registered validator.
            self._validator = and_(self._validator, meth)
        return meth

    def default(self, meth):
        """
        Decorator that allows to set the default for an attribute.

        Returns *meth* unchanged.

        Raises:
            DefaultAlreadySetError: If default has been set before.

        .. versionadded:: 17.1.0
        """
        if self._default is not NOTHING:
            raise DefaultAlreadySetError

        # The decorated method becomes a takes_self factory so it can use
        # other, already-initialized fields.
        self._default = Factory(meth, takes_self=True)

        return meth
+
+
# Equip _CountingAttr itself with attrs-generated __repr__ and __eq__/__ne__
# (derived from its handwritten __attrs_attrs__ above).
_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
class ClassProps:
    """
    Effective class properties as derived from parameters to `attr.s()` or
    `define()` decorators.

    This is the same data structure that *attrs* uses internally to decide how
    to construct the final class.

    Warning:

        This feature is currently **experimental** and is not covered by our
        strict backwards-compatibility guarantees.


    Attributes:
        is_exception (bool):
            Whether the class is treated as an exception class.

        is_slotted (bool):
            Whether the class is slotted.

        has_weakref_slot (bool):
            Whether the class has a slot for weak references.

        is_frozen (bool):
            Whether the class is frozen.

        kw_only (KeywordOnly):
            Whether / how the class enforces keyword-only arguments on the
            ``__init__`` method.

        collected_fields_by_mro (bool):
            Whether the class fields were collected by method resolution order.
            That is, correctly but unlike `dataclasses`.

        added_init (bool):
            Whether the class has an *attrs*-generated ``__init__`` method.

        added_repr (bool):
            Whether the class has an *attrs*-generated ``__repr__`` method.

        added_eq (bool):
            Whether the class has *attrs*-generated equality methods.

        added_ordering (bool):
            Whether the class has *attrs*-generated ordering methods.

        hashability (Hashability): How hashable the class is; see
            `Hashability`.

        added_match_args (bool):
            Whether the class supports positional ``match`` statements over
            its fields.

        added_str (bool):
            Whether the class has an *attrs*-generated ``__str__`` method.

        added_pickling (bool):
            Whether the class has *attrs*-generated ``__getstate__`` and
            ``__setstate__`` methods for `pickle`.

        on_setattr_hook (Callable[[Any, Attribute[Any], Any], Any] | None):
            The class's ``__setattr__`` hook.

        field_transformer (Callable[[Attribute[Any]], Attribute[Any]] | None):
            The class's field transformer hook, if any.

    .. versionadded:: 25.4.0
    """

    class Hashability(enum.Enum):
        """
        The hashability of a class.

        .. versionadded:: 25.4.0
        """

        HASHABLE = "hashable"
        """Write a ``__hash__``."""
        HASHABLE_CACHED = "hashable_cache"
        """Write a ``__hash__`` and cache the hash."""
        UNHASHABLE = "unhashable"
        """Set ``__hash__`` to ``None``."""
        LEAVE_ALONE = "leave_alone"
        """Don't touch ``__hash__``."""

    class KeywordOnly(enum.Enum):
        """
        How attributes should be treated regarding keyword-only parameters.

        .. versionadded:: 25.4.0
        """

        NO = "no"
        """Attributes are not keyword-only."""
        YES = "yes"
        """Attributes in current class without kw_only=False are keyword-only."""
        FORCE = "force"
        """All attributes are keyword-only."""

    __slots__ = (  # noqa: RUF023 -- order matters for __init__
        "is_exception",
        "is_slotted",
        "has_weakref_slot",
        "is_frozen",
        "kw_only",
        "collected_fields_by_mro",
        "added_init",
        "added_repr",
        "added_eq",
        "added_ordering",
        "hashability",
        "added_match_args",
        "added_str",
        "added_pickling",
        "on_setattr_hook",
        "field_transformer",
    )

    def __init__(
        self,
        is_exception,
        is_slotted,
        has_weakref_slot,
        is_frozen,
        kw_only,
        collected_fields_by_mro,
        added_init,
        added_repr,
        added_eq,
        added_ordering,
        hashability,
        added_match_args,
        added_str,
        added_pickling,
        on_setattr_hook,
        field_transformer,
    ):
        self.is_exception = is_exception
        self.is_slotted = is_slotted
        self.has_weakref_slot = has_weakref_slot
        self.is_frozen = is_frozen
        self.kw_only = kw_only
        self.collected_fields_by_mro = collected_fields_by_mro
        self.added_init = added_init
        self.added_repr = added_repr
        self.added_eq = added_eq
        self.added_ordering = added_ordering
        self.hashability = hashability
        self.added_match_args = added_match_args
        self.added_str = added_str
        self.added_pickling = added_pickling
        self.on_setattr_hook = on_setattr_hook
        self.field_transformer = field_transformer

    @property
    def is_hashable(self):
        """
        Whether the class gets a working ``__hash__`` (cached or not).
        """
        return (
            self.hashability is ClassProps.Hashability.HASHABLE
            or self.hashability is ClassProps.Hashability.HASHABLE_CACHED
        )
+
+
# Describe ClassProps' own slots as Attribute instances so the generated
# __repr__ and __eq__ below can be attached to it.
_cas = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
        alias=_default_init_alias_for(name),
    )
    for name in ClassProps.__slots__
]

# ClassProps gets repr and equality but no attrs-generated __hash__.
ClassProps = _add_eq(_add_repr(ClassProps, attrs=_cas), attrs=_cas)
+
+
class Factory:
    """
    Stores a factory callable.

    If passed as the default value to `attrs.field`, the factory is used to
    generate a new value.

    Args:
        factory (typing.Callable):
            A callable that takes either none or exactly one mandatory
            positional argument depending on *takes_self*.

        takes_self (bool):
            Pass the partially initialized instance that is being initialized
            as a positional argument.

    .. versionadded:: 17.1.0 *takes_self*
    """

    # Order matters: __getstate__/__setstate__ serialize by slot position.
    __slots__ = ("factory", "takes_self")

    def __init__(self, factory, takes_self=False):
        self.factory = factory
        self.takes_self = takes_self

    def __getstate__(self):
        """
        Play nice with pickle.
        """
        return tuple(getattr(self, slot) for slot in self.__slots__)

    def __setstate__(self, state):
        """
        Play nice with pickle.
        """
        for slot, slot_value in zip(self.__slots__, state):
            setattr(self, slot, slot_value)
+
+
# Describe Factory's own slots as Attribute instances for the bootstrap
# call below (no alias is passed here, unlike for Attribute/ClassProps).
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in Factory.__slots__
]

# Factory participates in repr, equality, and hashing over all its fields.
Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
class Converter:
    """
    Stores a converter callable.

    Allows for the wrapped converter to take additional arguments. The
    arguments are passed in the order they are documented.

    Args:
        converter (Callable): A callable that converts the passed value.

        takes_self (bool):
            Pass the partially initialized instance that is being initialized
            as a positional argument. (default: `False`)

        takes_field (bool):
            Pass the field definition (an :class:`Attribute`) into the
            converter as a positional argument. (default: `False`)

    .. versionadded:: 24.1.0
    """

    __slots__ = (
        "__call__",
        "_first_param_type",
        "_global_name",
        "converter",
        "takes_field",
        "takes_self",
    )

    def __init__(self, converter, *, takes_self=False, takes_field=False):
        self.converter = converter
        self.takes_self = takes_self
        self.takes_field = takes_field

        ex = _AnnotationExtractor(converter)
        self._first_param_type = ex.get_first_param_type()

        # Pick a wrapper with the uniform (value, instance, field) signature
        # that forwards only the arguments this converter actually wants.
        if takes_self and takes_field:
            wrapper = lambda value, instance, field: self.converter(
                value, instance, field
            )
        elif takes_self:
            wrapper = lambda value, instance, __: self.converter(
                value, instance
            )
        elif takes_field:
            wrapper = lambda value, __, field: self.converter(value, field)
        else:
            wrapper = lambda value, _, __: self.converter(value)

        # Propagate the wrapped converter's return annotation, if any, so
        # type extraction keeps working on the wrapper.
        rt = ex.get_return_type()
        if rt is not None:
            wrapper.__annotations__["return"] = rt

        self.__call__ = wrapper

    @staticmethod
    def _get_global_name(attr_name: str) -> str:
        """
        Return the name that a converter for an attribute name *attr_name*
        would have.
        """
        return "__attr_converter_%s" % (attr_name,)

    def _fmt_converter_call(self, attr_name: str, value_var: str) -> str:
        """
        Return a string that calls the converter for an attribute name
        *attr_name* and the value in variable named *value_var* according to
        `self.takes_self` and `self.takes_field`.
        """
        # Assemble the positional arguments in documented order:
        # value, then instance, then field definition.
        call_args = [value_var]
        if self.takes_self:
            call_args.append("self")
        if self.takes_field:
            call_args.append(f"attr_dict['{attr_name}']")

        return f"{self._get_global_name(attr_name)}({', '.join(call_args)})"

    def __getstate__(self):
        """
        Return a dict containing only converter and takes_self -- the rest gets
        computed when loading.
        """
        return {
            name: getattr(self, name)
            for name in ("converter", "takes_self", "takes_field")
        }

    def __setstate__(self, state):
        """
        Load instance from state by re-running __init__.
        """
        self.__init__(**state)
+
+
# Manually construct the Attribute descriptors for Converter's user-facing
# fields (the private slots are derived state and excluded) so Converter can
# be given attrs-generated dunder methods without decorating it.
_f = [
    Attribute(
        name=name,
        default=NOTHING,
        validator=None,
        repr=True,
        cmp=None,
        eq=True,
        order=False,
        hash=True,
        init=True,
        inherited=False,
    )
    for name in ("converter", "takes_self", "takes_field")
]

# Bolt the generated __repr__/__eq__/__hash__ onto Converter using the
# Attribute list above.
Converter = _add_hash(
    _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f
)
+
+
def make_class(
    name, attrs, bases=(object,), class_body=None, **attributes_arguments
):
    r"""
    A quick way to create a new class called *name* with *attrs*.

    .. note::

        ``make_class()`` is a thin wrapper around `attr.s`, not `attrs.define`
        which means that it doesn't come with some of the improved defaults.

        For example, if you want the same ``on_setattr`` behavior as in
        `attrs.define`, you have to pass the hooks yourself: ``make_class(...,
        on_setattr=setters.pipe(setters.convert, setters.validate)``

    .. warning::

        It is *your* duty to ensure that the class name and the attribute names
        are valid identifiers. ``make_class()`` will *not* validate them for
        you.

    Args:
        name (str): The name for the new class.

        attrs (list | dict):
            A list of names or a dictionary of mappings of names to `attr.ib`\
            s / `attrs.field`\ s.

            The order is deduced from the order of the names or attributes
            inside *attrs*. Otherwise the order of the definition of the
            attributes is used.

        bases (tuple[type, ...]): Classes that the new class will subclass.

        class_body (dict):
            An optional dictionary of class attributes for the new class.

        attributes_arguments: Passed unmodified to `attr.s`.

    Returns:
        type: A new class with *attrs*.

    .. versionadded:: 17.1.0 *bases*
    .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
    .. versionchanged:: 23.2.0 *class_body*
    .. versionchanged:: 25.2.0 Class names can now be unicode.
    """
    # Class identifiers are converted into the normal form NFKC while parsing.
    name = unicodedata.normalize("NFKC", name)

    if isinstance(attrs, dict):
        cls_dict = attrs
    elif isinstance(attrs, (list, tuple)):
        cls_dict = {attr_name: attrib() for attr_name in attrs}
    else:
        msg = "attrs argument must be a dict or a list."
        raise TypeError(msg)

    # Lift the attrs lifecycle hooks and a user-supplied __init__ out of the
    # attribute mapping -- they belong in the class body, not in the fields.
    body = dict(class_body) if class_body is not None else {}
    for hook_name in ("__attrs_pre_init__", "__attrs_post_init__", "__init__"):
        hook = cls_dict.pop(hook_name, None)
        if hook is not None:
            body[hook_name] = hook

    type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))

    # For pickling to work, the __module__ variable needs to be set to the
    # frame where the class is created. Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython).
    with contextlib.suppress(AttributeError, ValueError):
        type_.__module__ = sys._getframe(1).f_globals.get(
            "__name__", "__main__"
        )

    # We do it here for proper warnings with meaningful stacklevel.
    cmp = attributes_arguments.pop("cmp", None)
    eq, order = _determine_attrs_eq_order(
        cmp,
        attributes_arguments.get("eq"),
        attributes_arguments.get("order"),
        True,
    )
    attributes_arguments["eq"] = eq
    attributes_arguments["order"] = order

    cls = _attrs(these=cls_dict, **attributes_arguments)(type_)
    # Only add type annotations now or "_attrs()" will complain:
    cls.__annotations__ = {
        k: v.type for k, v in cls_dict.items() if v.type is not None
    }
    return cls
+
+
# These are required within this module, so we define them here and merely
# import them into .validators / .converters.
+
+
@attrs(slots=True, unsafe_hash=True)
class _AndValidator:
    """
    Compose many validators to a single one.
    """

    # Tuple of the wrapped validator callables, in execution order.
    _validators = attrib()

    def __call__(self, inst, attr, value):
        # Run every wrapped validator in order; each raises on failure, so
        # falling off the end of the loop means all of them passed.
        for v in self._validators:
            v(inst, attr, value)
+
+
def and_(*validators):
    """
    A validator that composes multiple validators into one.

    When called on a value, it runs all wrapped validators.

    Args:
        validators (~collections.abc.Iterable[typing.Callable]):
            Arbitrary number of validators.

    .. versionadded:: 17.1.0
    """
    flattened = []
    for v in validators:
        # Nested _AndValidators are unwrapped so the composition stays flat
        # instead of building a tree of wrappers.
        if isinstance(v, _AndValidator):
            flattened.extend(v._validators)
        else:
            flattened.append(v)

    return _AndValidator(tuple(flattened))
+
+
def pipe(*converters):
    """
    A converter that composes multiple converters into one.

    When called on a value, it runs all wrapped converters, returning the
    *last* value.

    Type annotations will be inferred from the wrapped converters', if they
    have any.

    Args:
        converters (~collections.abc.Iterable[typing.Callable]):
            Arbitrary number of converters.

    .. versionadded:: 20.1.0
    """

    # If any step is a Converter, the whole pipeline must accept
    # (value, instance, field) and be returned wrapped in a Converter.
    return_instance = any(isinstance(c, Converter) for c in converters)

    if return_instance:

        def pipe_converter(val, inst, field):
            # Forward instance/field only to Converter steps; plain callables
            # get just the running value.
            for c in converters:
                val = (
                    c(val, inst, field) if isinstance(c, Converter) else c(val)
                )

            return val

    else:

        def pipe_converter(val):
            for c in converters:
                val = c(val)

            return val

    if not converters:
        # If the converter list is empty, pipe_converter is the identity.
        A = TypeVar("A")
        pipe_converter.__annotations__.update({"val": A, "return": A})
    else:
        # Get parameter type from first converter.
        t = _AnnotationExtractor(converters[0]).get_first_param_type()
        if t:
            pipe_converter.__annotations__["val"] = t

        last = converters[-1]
        if not PY_3_11_PLUS and isinstance(last, Converter):
            # NOTE(review): on pre-3.11 Pythons the Converter object itself is
            # unwrapped to its __call__ before annotation extraction --
            # presumably annotation introspection can't see through the
            # instance there; confirm against _AnnotationExtractor.
            last = last.__call__

        # Get return type from last converter.
        rt = _AnnotationExtractor(last).get_return_type()
        if rt:
            pipe_converter.__annotations__["return"] = rt

    if return_instance:
        return Converter(pipe_converter, takes_self=True, takes_field=True)
    return pipe_converter
diff --git a/py311/lib/python3.11/site-packages/attr/_next_gen.py b/py311/lib/python3.11/site-packages/attr/_next_gen.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ccd0da2446dc126ce936b054581a527e247cabc
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_next_gen.py
@@ -0,0 +1,674 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are keyword-only APIs that call `attr.s` and `attr.ib` with different
+default values.
+"""
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+ _DEFAULT_ON_SETATTR,
+ NOTHING,
+ _frozen_setattrs,
+ attrib,
+ attrs,
+)
+from .exceptions import NotAnAttrsClassError, UnannotatedAttributeError
+
+
def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    unsafe_hash=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
    force_kw_only=False,
):
    r"""
    A class decorator that adds :term:`dunder methods` according to
    :term:`fields <field>` specified using :doc:`type annotations <types>`,
    `field()` calls, or the *these* argument.

    Since *attrs* patches or replaces an existing class, you cannot use
    `object.__init_subclass__` with *attrs* classes, because it runs too early.
    As a replacement, you can define ``__attrs_init_subclass__`` on your class.
    It will be called by *attrs* classes that subclass it after they're
    created. See also :ref:`init-subclass`.

    Args:
        slots (bool):
            Create a :term:`slotted class <slotted classes>` that's more
            memory-efficient. Slotted classes are generally superior to the
            default dict classes, but have some gotchas you should know about,
            so we encourage you to read the :term:`glossary entry
            <slotted classes>`.

        auto_detect (bool):
            Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
            explicitly, assume they are set to True **unless any** of the
            involved methods for one of the arguments is implemented in the
            *current* class (meaning, it is *not* inherited from some base
            class).

            So, for example by implementing ``__eq__`` on a class yourself,
            *attrs* will deduce ``eq=False`` and will create *neither*
            ``__eq__`` *nor* ``__ne__`` (but Python classes come with a
            sensible ``__ne__`` by default, so it *should* be enough to only
            implement ``__eq__`` in most cases).

            Passing :data:`True` or :data:`False` to *init*, *repr*, *eq*, or
            *hash* overrides whatever *auto_detect* would determine.

        auto_exc (bool):
            If the class subclasses `BaseException` (which implicitly includes
            any subclass of any exception), the following happens to behave
            like a well-behaved Python exception class:

            - the values for *eq*, *order*, and *hash* are ignored and the
              instances compare and hash by the instance's ids [#]_ ,
            - all attributes that are either passed into ``__init__`` or have a
              default value are additionally available as a tuple in the
              ``args`` attribute,
            - the value of *str* is ignored leaving ``__str__`` to base
              classes.

            .. [#]
               Note that *attrs* will *not* remove existing implementations of
               ``__hash__`` or the equality methods. It just won't add own
               ones.

        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
            A callable that is run whenever the user attempts to set an
            attribute (either by assignment like ``i.x = 42`` or by using
            `setattr` like ``setattr(i, "x", 42)``). It receives the same
            arguments as validators: the instance, the attribute that is being
            modified, and the new value.

            If no exception is raised, the attribute is set to the return value
            of the callable.

            If a list of callables is passed, they're automatically wrapped in
            an `attrs.setters.pipe`.

            If left None, the default behavior is to run converters and
            validators whenever an attribute is set.

        init (bool):
            Create a ``__init__`` method that initializes the *attrs*
            attributes. Leading underscores are stripped for the argument name,
            unless an alias is set on the attribute.

            .. seealso::
                `init` shows advanced ways to customize the generated
                ``__init__`` method, including executing code before and after.

        repr(bool):
            Create a ``__repr__`` method with a human readable representation
            of *attrs* attributes.

        str (bool):
            Create a ``__str__`` method that is identical to ``__repr__``. This
            is usually not necessary except for `Exception`\ s.

        eq (bool | None):
            If True or None (default), add ``__eq__`` and ``__ne__`` methods
            that check two instances for equality.

            .. seealso::
                `comparison` describes how to customize the comparison behavior
                going as far comparing NumPy arrays.

        order (bool | None):
            If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
            methods that behave like *eq* above and allow instances to be
            ordered.

            They compare the instances as if they were tuples of their *attrs*
            attributes if and only if the types of both classes are
            *identical*.

            If `None` mirror value of *eq*.

            .. seealso:: `comparison`

        unsafe_hash (bool | None):
            If None (default), the ``__hash__`` method is generated according
            how *eq* and *frozen* are set.

            1. If *both* are True, *attrs* will generate a ``__hash__`` for
               you.
            2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
               to None, marking it unhashable (which it is).
            3. If *eq* is False, ``__hash__`` will be left untouched meaning
               the ``__hash__`` method of the base class will be used. If the
               base class is `object`, this means it will fall back to id-based
               hashing.

            Although not recommended, you can decide for yourself and force
            *attrs* to create one (for example, if the class is immutable even
            though you didn't freeze it programmatically) by passing True or
            not. Both of these cases are rather special and should be used
            carefully.

            .. seealso::

                - Our documentation on `hashing`,
                - Python's documentation on `object.__hash__`,
                - and the `GitHub issue that led to the default \ behavior
                  <https://github.com/python-attrs/attrs/issues/136>`_ for more
                  details.

        hash (bool | None):
            Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.

        cache_hash (bool):
            Ensure that the object's hash code is computed only once and stored
            on the object. If this is set to True, hashing must be either
            explicitly or implicitly enabled for this class. If the hash code
            is cached, avoid any reassignments of fields involved in hash code
            computation or mutations of the objects those fields point to after
            object creation. If such changes occur, the behavior of the
            object's hash code is undefined.

        frozen (bool):
            Make instances immutable after initialization. If someone attempts
            to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
            is raised.

            .. note::

                1. This is achieved by installing a custom ``__setattr__``
                   method on your class, so you can't implement your own.

                2. True immutability is impossible in Python.

                3. This *does* have a minor a runtime performance `impact
                   <how-frozen>` when initializing new instances. In other
                   words: ``__init__`` is slightly slower with ``frozen=True``.

                4. If a class is frozen, you cannot modify ``self`` in
                   ``__attrs_post_init__`` or a self-written ``__init__``. You
                   can circumvent that limitation by using
                   ``object.__setattr__(self, "attribute_name", value)``.

                5. Subclasses of a frozen class are frozen too.

        kw_only (bool):
            Make attributes keyword-only in the generated ``__init__`` (if
            *init* is False, this parameter is ignored). Attributes that
            explicitly set ``kw_only=False`` are not affected; base class
            attributes are also not affected.

            Also see *force_kw_only*.

        weakref_slot (bool):
            Make instances weak-referenceable. This has no effect unless
            *slots* is True.

        field_transformer (~typing.Callable | None):
            A function that is called with the original class object and all
            fields right before *attrs* finalizes the class. You can use this,
            for example, to automatically add converters or validators to
            fields based on their types.

            .. seealso:: `transform-fields`

        match_args (bool):
            If True (default), set ``__match_args__`` on the class to support
            :pep:`634` (*Structural Pattern Matching*). It is a tuple of all
            non-keyword-only ``__init__`` parameter names on Python 3.10 and
            later. Ignored on older Python versions.

        collect_by_mro (bool):
            If True, *attrs* collects attributes from base classes correctly
            according to the `method resolution order
            <https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
            will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.

            See also `issue #428
            <https://github.com/python-attrs/attrs/issues/428>`_.

        force_kw_only (bool):
            A back-compat flag for restoring pre-25.4.0 behavior. If True and
            ``kw_only=True``, all attributes are made keyword-only, including
            base class attributes, and those set to ``kw_only=False`` at the
            attribute level. Defaults to False.

            See also `issue #980
            <https://github.com/python-attrs/attrs/issues/980>`_.

        getstate_setstate (bool | None):
            .. note::

                This is usually only interesting for slotted classes and you
                should probably just set *auto_detect* to True.

            If True, ``__getstate__`` and ``__setstate__`` are generated and
            attached to the class. This is necessary for slotted classes to be
            pickleable. If left None, it's True by default for slotted classes
            and False for dict classes.

            If *auto_detect* is True, and *getstate_setstate* is left None, and
            **either** ``__getstate__`` or ``__setstate__`` is detected
            directly on the class (meaning: not inherited), it is set to False
            (this is usually what you want).

        auto_attribs (bool | None):
            If True, look at type annotations to determine which attributes to
            use, like `dataclasses`. If False, it will only look for explicit
            :func:`field` class attributes, like classic *attrs*.

            If left None, it will guess:

            1. If any attributes are annotated and no unannotated
               `attrs.field`\ s are found, it assumes *auto_attribs=True*.
            2. Otherwise it assumes *auto_attribs=False* and tries to collect
               `attrs.field`\ s.

            If *attrs* decides to look at type annotations, **all** fields
            **must** be annotated. If *attrs* encounters a field that is set to
            a :func:`field` / `attr.ib` but lacks a type annotation, an
            `attrs.exceptions.UnannotatedAttributeError` is raised. Use
            ``field_name: typing.Any = field(...)`` if you don't want to set a
            type.

            .. warning::

                For features that use the attribute name to create decorators
                (for example, :ref:`validators <validators>`), you still *must*
                assign :func:`field` / `attr.ib` to them. Otherwise Python will
                either not find the name or try to use the default value to
                call, for example, ``validator`` on it.

            Attributes annotated as `typing.ClassVar`, and attributes that are
            neither annotated nor set to an `field()` are **ignored**.

        these (dict[str, object]):
            A dictionary of name to the (private) return value of `field()`
            mappings. This is useful to avoid the definition of your attributes
            within the class body because you can't (for example, if you want
            to add ``__repr__`` methods to Django models) or don't want to.

            If *these* is not `None`, *attrs* will *not* search the class body
            for attributes and will *not* remove any attributes from it.

            The order is deduced from the order of the attributes inside
            *these*.

            Arguably, this is a rather obscure feature.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    .. versionchanged:: 24.1.0
       Instances are not compared as tuples of attributes anymore, but using a
       big ``and`` condition. This is faster and has more correct behavior for
       uncomparable values like `math.nan`.
    .. versionadded:: 24.1.0
       If a class has an *inherited* classmethod called
       ``__attrs_init_subclass__``, it is executed after the class is created.
    .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
    .. versionadded:: 24.3.0
       Unless already present, a ``__replace__`` method is automatically
       created for `copy.replace` (Python 3.13+ only).
    .. versionchanged:: 25.4.0
       *kw_only* now only applies to attributes defined in the current class,
       and respects attribute-level ``kw_only=False`` settings.
    .. versionadded:: 25.4.0
       Added *force_kw_only* to go back to the previous *kw_only* behavior.

    .. note::

        The main differences to the classic `attr.s` are:

        - Automatically detect whether or not *auto_attribs* should be `True`
          (c.f. *auto_attribs* parameter).
        - Converters and validators run when attributes are set by default --
          if *frozen* is `False`.
        - *slots=True*

          Usually, this has only upsides and few visible effects in everyday
          programming. But it *can* lead to some surprising behaviors, so
          please make sure to read :term:`slotted classes`.

        - *auto_exc=True*
        - *auto_detect=True*
        - *order=False*
        - *force_kw_only=False*
        - Some options that were only relevant on Python 2 or were kept around
          for backwards-compatibility have been removed.

    """

    def do_it(cls, auto_attribs):
        # Delegate to classic `attrs` with the next-gen defaults baked in.
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            unsafe_hash=unsafe_hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
            match_args=match_args,
            force_kw_only=force_kw_only,
        )

    def wrap(cls):
        """
        Making this a wrapper ensures this code runs during class creation.

        We also ensure that frozen-ness of classes is inherited.
        """
        nonlocal frozen, on_setattr

        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes convert & validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = _DEFAULT_ON_SETATTR

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
        for base_cls in cls.__bases__:
            if base_cls.__setattr__ is _frozen_setattrs:
                if had_on_setattr:
                    msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
                    raise ValueError(msg)

                on_setattr = setters.NO_OP
                break

        if auto_attribs is not None:
            return do_it(cls, auto_attribs)

        # auto_attribs was left None: try annotation-based collection first
        # and fall back to classic field collection if it fails.
        try:
            return do_it(cls, True)
        except UnannotatedAttributeError:
            return do_it(cls, False)

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but `None` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap

    return wrap(maybe_cls)
+
+
# `mutable` is an explicit alias for the default (non-frozen) `define`.
mutable = define
# `frozen` is `define` with immutability enabled and the default on_setattr
# hooks disabled (frozen instances cannot be mutated after __init__ anyway).
frozen = partial(define, frozen=True, on_setattr=None)
+
+
def field(
    *,
    default=NOTHING,
    validator=None,
    repr=True,
    hash=None,
    init=True,
    metadata=None,
    type=None,
    converter=None,
    factory=None,
    kw_only=None,
    eq=None,
    order=None,
    on_setattr=None,
    alias=None,
):
    """
    Create a new :term:`field` / :term:`attribute` on a class.

    .. warning::

        Does **nothing** unless the class is also decorated with
        `attrs.define` (or similar)!

    Args:
        default:
            A value that is used if an *attrs*-generated ``__init__`` is used
            and no value is passed while instantiating or the attribute is
            excluded using ``init=False``.

            If the value is an instance of `attrs.Factory`, its callable will
            be used to construct a new value (useful for mutable data types
            like lists or dicts).

            If a default is not set (or set manually to `attrs.NOTHING`), a
            value *must* be supplied when instantiating; otherwise a
            `TypeError` will be raised.

            .. seealso:: `defaults`

        factory (~typing.Callable):
            Syntactic sugar for ``default=attr.Factory(factory)``.

        validator (~typing.Callable | list[~typing.Callable]):
            Callable that is called by *attrs*-generated ``__init__`` methods
            after the instance has been initialized. They receive the
            initialized instance, the :func:`~attrs.Attribute`, and the passed
            value.

            The return value is *not* inspected so the validator has to throw
            an exception itself.

            If a `list` is passed, its items are treated as validators and must
            all pass.

            Validators can be globally disabled and re-enabled using
            `attrs.validators.get_disabled` / `attrs.validators.set_disabled`.

            The validator can also be set using decorator notation as shown
            below.

            .. seealso:: :ref:`validators`

        repr (bool | ~typing.Callable):
            Include this attribute in the generated ``__repr__`` method. If
            True, include the attribute; if False, omit it. By default, the
            built-in ``repr()`` function is used. To override how the attribute
            value is formatted, pass a ``callable`` that takes a single value
            and returns a string. Note that the resulting string is used as-is,
            which means it will be used directly *instead* of calling
            ``repr()`` (the default).

        eq (bool | ~typing.Callable):
            If True (default), include this attribute in the generated
            ``__eq__`` and ``__ne__`` methods that check two instances for
            equality. To override how the attribute value is compared, pass a
            callable that takes a single value and returns the value to be
            compared.

            .. seealso:: `comparison`

        order (bool | ~typing.Callable):
            If True (default), include this attribute in the generated
            ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
            override how the attribute value is ordered, pass a callable that
            takes a single value and returns the value to be ordered.

            .. seealso:: `comparison`

        hash (bool | None):
            Include this attribute in the generated ``__hash__`` method. If
            None (default), mirror *eq*'s value. This is the correct behavior
            according to the Python spec. Setting this value to anything else
            than None is *discouraged*.

            .. seealso:: `hashing`

        init (bool):
            Include this attribute in the generated ``__init__`` method.

            It is possible to set this to False and set a default value. In
            that case this attribute is unconditionally initialized with the
            specified default value or factory.

            .. seealso:: `init`

        converter (typing.Callable | Converter):
            A callable that is called by *attrs*-generated ``__init__`` methods
            to convert attribute's value to the desired format.

            If a vanilla callable is passed, it is given the passed-in value as
            the only positional argument. It is possible to receive additional
            arguments by wrapping the callable in a `Converter`.

            Either way, the returned value will be used as the new value of the
            attribute. The value is converted before being passed to the
            validator, if any.

            .. seealso:: :ref:`converters`

        metadata (dict | None):
            An arbitrary mapping, to be used by third-party code.

            .. seealso:: `extending-metadata`.

        type (type):
            The type of the attribute. Nowadays, the preferred method to
            specify the type is using a variable annotation (see :pep:`526`).
            This argument is provided for backwards-compatibility and for usage
            with `make_class`. Regardless of the approach used, the type will
            be stored on ``Attribute.type``.

            Please note that *attrs* doesn't do anything with this metadata by
            itself. You can use it as part of your own code or for `static type
            checking <mypy>`.

        kw_only (bool | None):
            Make this attribute keyword-only in the generated ``__init__`` (if
            *init* is False, this parameter is ignored). If None (default),
            mirror the setting from `attrs.define`.

        on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
            Allows to overwrite the *on_setattr* setting from `attr.s`. If left
            None, the *on_setattr* value from `attr.s` is used. Set to
            `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
            attribute -- regardless of the setting in `define()`.

        alias (str | None):
            Override this attribute's parameter name in the generated
            ``__init__`` method. If left None, default to ``name`` stripped
            of leading underscores. See `private-attributes`.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.1.0
       *eq*, *order*, and *cmp* also accept a custom callable
    .. versionadded:: 22.2.0 *alias*
    .. versionadded:: 23.1.0
       The *type* parameter has been re-added; mostly for `attrs.make_class`.
       Please note that type checkers ignore this metadata.
    .. versionchanged:: 25.4.0
       *kw_only* can now be None, and its default is also changed from False to
       None.

    .. seealso::

        `attr.ib`
    """
    # Thin keyword-only facade over the classic `attr.ib` / `attrib`.
    return attrib(
        default=default,
        validator=validator,
        repr=repr,
        hash=hash,
        init=init,
        metadata=metadata,
        type=type,
        converter=converter,
        factory=factory,
        kw_only=kw_only,
        eq=eq,
        order=order,
        on_setattr=on_setattr,
        alias=alias,
    )
+
+
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
    """
    Same as `attr.asdict`, except that collections types are always retained
    and dict is always used as *dict_factory*.

    .. versionadded:: 21.3.0
    """
    # Force the next-gen policy (retain collection types) and forward
    # everything else to the classic implementation unchanged.
    return _asdict(
        retain_collection_types=True,
        inst=inst,
        recurse=recurse,
        filter=filter,
        value_serializer=value_serializer,
    )
+
+
def astuple(inst, *, recurse=True, filter=None):
    """
    Same as `attr.astuple`, except that collections types are always retained
    and `tuple` is always used as the *tuple_factory*.

    .. versionadded:: 21.3.0
    """
    # Force the next-gen policy (retain collection types) and forward
    # everything else to the classic implementation unchanged.
    return _astuple(
        retain_collection_types=True,
        inst=inst,
        recurse=recurse,
        filter=filter,
    )
+
+
def inspect(cls):
    """
    Inspect the class and return its effective build parameters.

    Warning:
        This feature is currently **experimental** and is not covered by our
        strict backwards-compatibility guarantees.

    Args:
        cls: The *attrs*-decorated class to inspect.

    Returns:
        The effective build parameters of the class.

    Raises:
        NotAnAttrsClassError: If the class is not an *attrs*-decorated class.

    .. versionadded:: 25.4.0
    """
    # Look only at the class's own __dict__ -- inherited build parameters
    # would belong to a base class, not to *cls*. A sentinel distinguishes
    # "missing" from a stored None.
    _missing = object()
    props = cls.__dict__.get("__attrs_props__", _missing)
    if props is _missing:
        msg = f"{cls!r} is not an attrs-decorated class."
        raise NotAnAttrsClassError(msg)
    return props
diff --git a/py311/lib/python3.11/site-packages/attr/_typing_compat.pyi b/py311/lib/python3.11/site-packages/attr/_typing_compat.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..ca7b71e906a28f88726bbd342fdfe636af0281e7
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_typing_compat.pyi
@@ -0,0 +1,15 @@
+from typing import Any, ClassVar, Protocol
+
# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
MYPY = False

if MYPY:
    # A protocol to be able to statically accept an attrs class.
    # mypy (via its attrs plug-in) sees the precise structural requirement:
    # the class carries the `__attrs_attrs__` class variable.
    class AttrsInstance_(Protocol):
        __attrs_attrs__: ClassVar[Any]

else:
    # For type checkers without plug-in support use an empty protocol that
    # will (hopefully) be combined into a union.
    class AttrsInstance_(Protocol):
        pass
diff --git a/py311/lib/python3.11/site-packages/attr/_version_info.py b/py311/lib/python3.11/site-packages/attr/_version_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..27f18884ad4d7a42f338e31a8ea19bc5d19999cd
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_version_info.py
@@ -0,0 +1,89 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo:
    """
    A version object that can be compared to tuple of length 1--4:

    >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
    True
    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
    True
    >>> vi = attr.VersionInfo(19, 2, 0, "final")
    >>> vi < (19, 1, 1)
    False
    >>> vi < (19,)
    False
    >>> vi == (19, 2,)
    True
    >>> vi == (19, 2, 1)
    False

    .. versionadded:: 19.2
    """

    year = attrib(type=int)
    minor = attrib(type=int)
    micro = attrib(type=int)
    releaselevel = attrib(type=str)

    @classmethod
    def _from_version_string(cls, s):
        """
        Parse the dotted version string *s* and return a `VersionInfo`.
        """
        parts = s.split(".")
        if len(parts) == 3:
            # A bare "year.minor.micro" string is an implicit final release.
            parts.append("final")

        return cls(
            year=int(parts[0]),
            minor=int(parts[1]),
            micro=int(parts[2]),
            releaselevel=parts[3],
        )

    def _ensure_tuple(self, other):
        """
        Ensure *other* is a tuple of a valid length.

        Returns a possibly transformed *other* and ourselves as a tuple of
        the same length as *other*.
        """
        if self.__class__ is other.__class__:
            other = astuple(other)

        # Anything that is not a 1- to 4-tuple at this point is not
        # comparable; signal that to the dunder callers.
        if not isinstance(other, tuple) or not 1 <= len(other) <= 4:
            raise NotImplementedError

        return astuple(self)[: len(other)], other

    def __eq__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        return us == them

    def __lt__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        # Since alphabetically "dev0" < "final" < "post1" < "post2", plain
        # tuple comparison handles releaselevel correctly for now.
        return us < them

    def __hash__(self):
        return hash((self.year, self.minor, self.micro, self.releaselevel))
diff --git a/py311/lib/python3.11/site-packages/attr/_version_info.pyi b/py311/lib/python3.11/site-packages/attr/_version_info.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..45ced086337783c4b73b26cd17d2c1c260e24029
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/_version_info.pyi
@@ -0,0 +1,9 @@
# Typing stub for attr._version_info.VersionInfo.  The fields are exposed as
# read-only properties because the runtime class is frozen.
class VersionInfo:
    @property
    def year(self) -> int: ...
    @property
    def minor(self) -> int: ...
    @property
    def micro(self) -> int: ...
    @property
    def releaselevel(self) -> str: ...
diff --git a/py311/lib/python3.11/site-packages/attr/converters.py b/py311/lib/python3.11/site-packages/attr/converters.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a79deef04282fb33a42f6aca59563d49e70d4cb
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/converters.py
@@ -0,0 +1,162 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Converter, Factory, pipe
+
+
# Public API of this module; `pipe` is re-exported from ._make.
__all__ = [
    "default_if_none",
    "optional",
    "pipe",
    "to_bool",
]
+
+
def optional(converter):
    """
    A converter that makes an attribute optional: `None` passes through
    untouched, every other value goes through *converter*.

    Type annotations will be inferred from the wrapped converter's, if it has
    any.

    Args:
        converter (typing.Callable):
            the converter that is used for non-`None` values.

    .. versionadded:: 17.1.0
    """
    # A Converter instance takes (value, instance, field); a plain callable
    # takes only the value.  Build a matching wrapper either way.
    wraps_converter = isinstance(converter, Converter)

    if wraps_converter:

        def optional_converter(val, inst, field):
            return None if val is None else converter(val, inst, field)

    else:

        def optional_converter(val):
            return None if val is None else converter(val)

    # Propagate the wrapped converter's annotations, widened with None.
    xtr = _AnnotationExtractor(converter)

    t = xtr.get_first_param_type()
    if t:
        optional_converter.__annotations__["val"] = typing.Optional[t]

    rt = xtr.get_return_type()
    if rt:
        optional_converter.__annotations__["return"] = typing.Optional[rt]

    if wraps_converter:
        return Converter(optional_converter, takes_self=True, takes_field=True)

    return optional_converter
+
+
def default_if_none(default=NOTHING, factory=None):
    """
    A converter that replaces `None` values with *default* or the result of
    *factory*.

    Args:
        default:
            Value to be used if `None` is passed. Passing an instance of
            `attrs.Factory` is supported, however the ``takes_self`` option
            is *not*.

        factory (typing.Callable):
            A callable that takes no parameters whose result is used if
            `None` is passed.

    Raises:
        TypeError: If **neither** *default* or *factory* is passed.

        TypeError: If **both** *default* and *factory* are passed.

        ValueError:
            If an instance of `attrs.Factory` is passed with
            ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    has_default = default is not NOTHING
    has_factory = factory is not None

    if not has_default and not has_factory:
        msg = "Must pass either `default` or `factory`."
        raise TypeError(msg)

    if has_default and has_factory:
        msg = "Must pass either `default` or `factory` but not both."
        raise TypeError(msg)

    if has_factory:
        default = Factory(factory)

    if not isinstance(default, Factory):

        def default_if_none_converter(val):
            return val if val is not None else default

        return default_if_none_converter

    if default.takes_self:
        msg = "`takes_self` is not supported by default_if_none."
        raise ValueError(msg)

    def default_if_none_converter(val):
        return val if val is not None else default.factory()

    return default_if_none_converter
+
+
def to_bool(val):
    """
    Convert "boolean" strings (for example, from environment variables) to
    real booleans.

    Values mapping to `True`:

    - ``True``
    - ``"true"`` / ``"t"``
    - ``"yes"`` / ``"y"``
    - ``"on"``
    - ``"1"``
    - ``1``

    Values mapping to `False`:

    - ``False``
    - ``"false"`` / ``"f"``
    - ``"no"`` / ``"n"``
    - ``"off"``
    - ``"0"``
    - ``0``

    Raises:
        ValueError: For any other value.

    .. versionadded:: 21.3.0
    """
    # Strings are matched case-insensitively.
    if isinstance(val, str):
        val = val.lower()

    truthy = (True, "true", "t", "yes", "y", "on", "1", 1)
    falsy = (False, "false", "f", "no", "n", "off", "0", 0)

    if val in truthy:
        return True
    if val in falsy:
        return False

    msg = f"Cannot convert value to bool: {val!r}"
    raise ValueError(msg)
diff --git a/py311/lib/python3.11/site-packages/attr/converters.pyi b/py311/lib/python3.11/site-packages/attr/converters.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..12bd0c4f17bdc60fb8904598af0a3d56d5874a9e
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/converters.pyi
@@ -0,0 +1,19 @@
# Typing stubs for attr.converters.  The paired overloads keep the wrapped
# kind: plain callables stay plain, Converter instances stay Converters.
from typing import Callable, Any, overload

from attrs import _ConverterType, _CallableConverterType

@overload
def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ...
@overload
def pipe(*validators: _ConverterType) -> _ConverterType: ...
@overload
def optional(converter: _CallableConverterType) -> _CallableConverterType: ...
@overload
def optional(converter: _ConverterType) -> _ConverterType: ...
@overload
def default_if_none(default: Any) -> _CallableConverterType: ...
@overload
def default_if_none(
    *, factory: Callable[[], Any]
) -> _CallableConverterType: ...
def to_bool(val: str | int | bool) -> bool: ...
diff --git a/py311/lib/python3.11/site-packages/attr/exceptions.py b/py311/lib/python3.11/site-packages/attr/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b7abb8154108aa1d0ae52fa9ee8e489f05b5563
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/exceptions.py
@@ -0,0 +1,95 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+
class FrozenError(AttributeError):
    """
    A frozen/immutable instance or attribute has been attempted to be
    modified.

    It mirrors the behavior of ``namedtuples`` by using the same error message
    and subclassing `AttributeError`.

    .. versionadded:: 20.1.0
    """

    msg = "can't set attribute"
    # Use an actual tuple so the value agrees with its ClassVar[tuple[str]]
    # annotation; the previous list literal contradicted the annotation.
    args: ClassVar[tuple[str]] = (msg,)
+
+
class FrozenInstanceError(FrozenError):
    """
    A frozen instance has been attempted to be modified.

    .. versionadded:: 16.1.0
    """


class FrozenAttributeError(FrozenError):
    """
    A frozen attribute has been attempted to be modified.

    .. versionadded:: 20.1.0
    """


class AttrsAttributeNotFoundError(ValueError):
    """
    An *attrs* function couldn't find an attribute that the user asked for.

    .. versionadded:: 16.2.0
    """


class NotAnAttrsClassError(ValueError):
    """
    A non-*attrs* class has been passed into an *attrs* function.

    .. versionadded:: 16.2.0
    """


class DefaultAlreadySetError(RuntimeError):
    """
    A default has been set when defining the field and is attempted to be
    reset using the decorator.

    .. versionadded:: 17.1.0
    """


class UnannotatedAttributeError(RuntimeError):
    """
    A class with ``auto_attribs=True`` has a field without a type annotation.

    .. versionadded:: 17.3.0
    """


class PythonTooOldError(RuntimeError):
    """
    It was attempted to use an *attrs* feature that requires a newer Python
    version.

    .. versionadded:: 18.2.0
    """
+
+
class NotCallableError(TypeError):
    """
    A field requiring a callable has been set with a value that is not
    callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        # Deliberately skip TypeError in the MRO and initialize through
        # Exception so both arguments land in ``args``.
        super(TypeError, self).__init__(msg, value)
        self.value = value
        self.msg = msg

    def __str__(self):
        return str(self.msg)
diff --git a/py311/lib/python3.11/site-packages/attr/exceptions.pyi b/py311/lib/python3.11/site-packages/attr/exceptions.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..f2680118b404db8f5227d04d27e8439331341c4d
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/exceptions.pyi
@@ -0,0 +1,17 @@
+from typing import Any
+
# Typing stubs for attr.exceptions; mirrors the runtime class hierarchy.
class FrozenError(AttributeError):
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
diff --git a/py311/lib/python3.11/site-packages/attr/filters.py b/py311/lib/python3.11/site-packages/attr/filters.py
new file mode 100644
index 0000000000000000000000000000000000000000..689b1705a60ff110d6077bab996f8b4588e55b82
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/filters.py
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
+"""
+
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isinstance(cls, type)),
+ frozenset(cls for cls in what if isinstance(cls, str)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
def include(*what):
    """
    Create a filter that only allows *what*.

    Args:
        what (list[type, str, attrs.Attribute]):
            What to include. Can be a type, a name, or an attribute.

    Returns:
        Callable:
            A callable that can be passed to `attrs.asdict`'s and
            `attrs.astuple`'s *filter* argument.

    .. versionchanged:: 23.1.0 Accept strings with field names.
    """
    classes, names, fields = _split_what(what)

    def include_(attribute, value):
        # Match by value type, by field name, or by the Attribute itself.
        if value.__class__ in classes:
            return True
        if attribute.name in names:
            return True
        return attribute in fields

    return include_
+
+
def exclude(*what):
    """
    Create a filter that does **not** allow *what*.

    Args:
        what (list[type, str, attrs.Attribute]):
            What to exclude. Can be a type, a name, or an attribute.

    Returns:
        Callable:
            A callable that can be passed to `attrs.asdict`'s and
            `attrs.astuple`'s *filter* argument.

    .. versionchanged:: 23.3.0 Accept field name string as input argument
    """
    classes, names, fields = _split_what(what)

    def exclude_(attribute, value):
        # Reject on any match: value type, field name, or Attribute identity.
        if value.__class__ in classes:
            return False
        if attribute.name in names:
            return False
        return attribute not in fields

    return exclude_
diff --git a/py311/lib/python3.11/site-packages/attr/filters.pyi b/py311/lib/python3.11/site-packages/attr/filters.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..974abdcdb51152393d9c9e460c21aa025c45880c
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/filters.pyi
@@ -0,0 +1,6 @@
# Typing stubs for attr.filters; both factories return asdict/astuple filters.
from typing import Any

from . import Attribute, _FilterType

def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
diff --git a/py311/lib/python3.11/site-packages/attr/py.typed b/py311/lib/python3.11/site-packages/attr/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/attr/setters.py b/py311/lib/python3.11/site-packages/attr/setters.py
new file mode 100644
index 0000000000000000000000000000000000000000..78b08398a6713fc5fa827c2dc853e0d05de743c4
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/setters.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
def pipe(*setters):
    """
    Run all *setters* in order and return the return value of the last one.

    .. versionadded:: 20.1.0
    """

    def wrapped_pipe(instance, attrib, new_value):
        # Thread each setter's result into the next one.
        result = new_value
        for setter in setters:
            result = setter(instance, attrib, result)
        return result

    return wrapped_pipe
+
+
def frozen(_instance, _attrib, _new_value):
    """
    Prevent an attribute to be modified.

    .. versionadded:: 20.1.0
    """
    raise FrozenAttributeError


def validate(instance, attrib, new_value):
    """
    Run *attrib*'s validator on *new_value* if it has one.

    .. versionadded:: 20.1.0
    """
    # Honor the global validator kill switch.
    if _config._run_validators is False:
        return new_value

    validator = attrib.validator
    if validator:
        validator(instance, attrib, new_value)

    return new_value


def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
    result.

    .. versionadded:: 20.1.0
    """
    converter = attrib.converter
    if not converter:
        return new_value

    # This can be removed once we drop 3.8 and use attrs.Converter instead.
    from ._make import Converter

    if isinstance(converter, Converter):
        return converter(new_value, instance, attrib)

    return converter(new_value)
+
+
# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
# Compared by identity, so it must remain this exact singleton object.
# Sphinx's autodata stopped working, so the docstring is inlined in the API
# docs.
NO_OP = object()
diff --git a/py311/lib/python3.11/site-packages/attr/setters.pyi b/py311/lib/python3.11/site-packages/attr/setters.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..73abf36e7d5b0f5f56e7fddeee716824c1c60d58
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/setters.pyi
@@ -0,0 +1,20 @@
# Typing stubs for attr.setters (on_setattr hooks).
from typing import Any, NewType, NoReturn, TypeVar

from . import Attribute
from attrs import _OnSetAttrType

_T = TypeVar("_T")

def frozen(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
diff --git a/py311/lib/python3.11/site-packages/attr/validators.py b/py311/lib/python3.11/site-packages/attr/validators.py
new file mode 100644
index 0000000000000000000000000000000000000000..837e003b6c2e5ce485d0b1cd19bfa398f18a8d18
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/validators.py
@@ -0,0 +1,748 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+import operator
+import re
+
+from contextlib import contextmanager
+from re import Pattern
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .converters import default_if_none
+from .exceptions import NotCallableError
+
+
# Public API of this module; `and_` is re-exported from ._make.
__all__ = [
    "and_",
    "deep_iterable",
    "deep_mapping",
    "disabled",
    "ge",
    "get_disabled",
    "gt",
    "in_",
    "instance_of",
    "is_callable",
    "le",
    "lt",
    "matches_re",
    "max_len",
    "min_len",
    "not_",
    "optional",
    "or_",
    "set_disabled",
]
+
+
def set_disabled(disabled):
    """
    Globally disable or enable running validators.

    By default, they are run.

    Args:
        disabled (bool): If `True`, disable running all validators.

    .. warning::

        This function is not thread-safe!

    .. versionadded:: 21.3.0
    """
    # The underlying flag is "run validators", so invert the argument.
    set_run_validators(not disabled)


def get_disabled():
    """
    Return a bool indicating whether validators are currently disabled or not.

    Returns:
        bool: `True` if validators are currently disabled.

    .. versionadded:: 21.3.0
    """
    return not get_run_validators()


@contextmanager
def disabled():
    """
    Context manager that disables running validators within its context.

    .. warning::

        This context manager is not thread-safe!

    .. versionadded:: 21.3.0
    """
    set_run_validators(False)
    try:
        yield
    finally:
        # Always re-enable, even if the body raised.
        set_run_validators(True)
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _InstanceOfValidator:
    type = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not isinstance(value, self.type):
            msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
            raise TypeError(
                msg,
                attr,
                self.type,
                value,
            )

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<instance_of validator for type {self.type!r}>"


def instance_of(type):
    """
    A validator that raises a `TypeError` if the initializer is called with a
    wrong type for this particular attribute (checks are performed using
    `isinstance` therefore it's also valid to pass a tuple of types).

    Args:
        type (type | tuple[type]): The type to check for.

    Raises:
        TypeError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the expected type, and the value it got.
    """
    return _InstanceOfValidator(type)
+
+
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator:
    pattern = attrib()
    match_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.match_func(value):
            msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
            raise ValueError(
                msg,
                attr,
                self.pattern,
                value,
            )

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<matches_re validator for pattern {self.pattern!r}>"


def matches_re(regex, flags=0, func=None):
    r"""
    A validator that raises `ValueError` if the initializer is called with a
    string that doesn't match *regex*.

    Args:
        regex (str, re.Pattern):
            A regex string or precompiled pattern to match against

        flags (int):
            Flags that will be passed to the underlying re function (default 0)

        func (typing.Callable):
            Which underlying `re` function to call. Valid options are
            `re.fullmatch`, `re.search`, and `re.match`; the default `None`
            means `re.fullmatch`. For performance reasons, the pattern is
            always precompiled using `re.compile`.

    .. versionadded:: 19.2.0
    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
    """
    valid_funcs = (re.fullmatch, None, re.search, re.match)
    if func not in valid_funcs:
        msg = "'func' must be one of {}.".format(
            ", ".join(
                sorted((e and e.__name__) or "None" for e in set(valid_funcs))
            )
        )
        raise ValueError(msg)

    if isinstance(regex, Pattern):
        if flags:
            msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
            raise TypeError(msg)
        pattern = regex
    else:
        pattern = re.compile(regex, flags)

    if func is re.match:
        match_func = pattern.match
    elif func is re.search:
        match_func = pattern.search
    else:
        # Default (and explicit re.fullmatch): whole-string match.
        match_func = pattern.fullmatch

    return _MatchesReValidator(pattern, match_func)
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _OptionalValidator:
    validator = attrib()

    def __call__(self, inst, attr, value):
        # None is always accepted; everything else must satisfy the wrapped
        # validator.
        if value is None:
            return

        self.validator(inst, attr, value)

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<optional validator for {self.validator!r} or None>"


def optional(validator):
    """
    A validator that makes an attribute optional. An optional attribute is one
    which can be set to `None` in addition to satisfying the requirements of
    the sub-validator.

    Args:
        validator
            (typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
            A validator (or validators) that is used for non-`None` values.

    .. versionadded:: 15.1.0
    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
    .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
    """
    if isinstance(validator, (list, tuple)):
        return _OptionalValidator(_AndValidator(validator))

    return _OptionalValidator(validator)
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _InValidator:
    options = attrib()
    _original_options = attrib(hash=False)

    def __call__(self, inst, attr, value):
        try:
            in_options = value in self.options
        except TypeError:  # e.g. `1 in "abc"`
            in_options = False

        if not in_options:
            msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
            raise ValueError(
                msg,
                attr,
                self._original_options,
                value,
            )

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<in_ validator with options {self._original_options!r}>"


def in_(options):
    """
    A validator that raises a `ValueError` if the initializer is called with a
    value that does not belong in the *options* provided.

    The check is performed using ``value in options``, so *options* has to
    support that operation.

    To keep the validator hashable, dicts, lists, and sets are transparently
    transformed into a `tuple`.

    Args:
        options: Allowed options.

    Raises:
        ValueError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the expected options, and the value it got.

    .. versionadded:: 17.1.0
    .. versionchanged:: 22.1.0
       The ValueError was incomplete until now and only contained the human
       readable error message. Now it contains all the information that has
       been promised since 17.1.0.
    .. versionchanged:: 24.1.0
       *options* that are a list, dict, or a set are now transformed into a
       tuple to keep the validator hashable.
    """
    # Keep the original for error messages/repr; hash-unfriendly containers
    # are converted so the validator itself stays hashable.
    repr_options = options
    if isinstance(options, (list, dict, set)):
        options = tuple(options)

    return _InValidator(options, repr_options)
+
+
@attrs(repr=False, slots=False, unsafe_hash=True)
class _IsCallableValidator:
    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not callable(value):
            message = (
                "'{name}' must be callable "
                "(got {value!r} that is a {actual!r})."
            )
            raise NotCallableError(
                msg=message.format(
                    name=attr.name, value=value, actual=value.__class__
                ),
                value=value,
            )

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty string.
        return "<is_callable validator>"


def is_callable():
    """
    A validator that raises a `attrs.exceptions.NotCallableError` if the
    initializer is called with a value for this particular attribute that is
    not callable.

    .. versionadded:: 19.1.0

    Raises:
        attrs.exceptions.NotCallableError:
            With a human readable error message containing the attribute
            (`attrs.Attribute`) name, and the value it got.
    """
    return _IsCallableValidator()
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _DeepIterable:
    member_validator = attrib(validator=is_callable())
    iterable_validator = attrib(
        default=None, validator=optional(is_callable())
    )

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.iterable_validator is not None:
            self.iterable_validator(inst, attr, value)

        for member in value:
            self.member_validator(inst, attr, member)

    def __repr__(self):
        iterable_identifier = (
            ""
            if self.iterable_validator is None
            else f" {self.iterable_validator!r}"
        )
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return (
            f"<deep_iterable validator for{iterable_identifier}"
            f" iterating {self.member_validator!r}>"
        )


def deep_iterable(member_validator, iterable_validator=None):
    """
    A validator that performs deep validation of an iterable.

    Args:
        member_validator: Validator(s) to apply to iterable members.

        iterable_validator:
            Validator(s) to apply to iterable itself (optional).

    Raises:
        TypeError: If any sub-validators fail.

    .. versionadded:: 19.1.0

    .. versionchanged:: 25.4.0
       *member_validator* and *iterable_validator* can now be a list or tuple
       of validators.
    """
    if isinstance(member_validator, (list, tuple)):
        member_validator = and_(*member_validator)
    if isinstance(iterable_validator, (list, tuple)):
        iterable_validator = and_(*iterable_validator)
    return _DeepIterable(member_validator, iterable_validator)
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _DeepMapping:
    key_validator = attrib(validator=optional(is_callable()))
    value_validator = attrib(validator=optional(is_callable()))
    mapping_validator = attrib(validator=optional(is_callable()))

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.mapping_validator is not None:
            self.mapping_validator(inst, attr, value)

        for key in value:
            if self.key_validator is not None:
                self.key_validator(inst, attr, key)
            if self.value_validator is not None:
                self.value_validator(inst, attr, value[key])

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"


def deep_mapping(
    key_validator=None, value_validator=None, mapping_validator=None
):
    """
    A validator that performs deep validation of a dictionary.

    All validators are optional, but at least one of *key_validator* or
    *value_validator* must be provided.

    Args:
        key_validator: Validator(s) to apply to dictionary keys.

        value_validator: Validator(s) to apply to dictionary values.

        mapping_validator:
            Validator(s) to apply to top-level mapping attribute.

    .. versionadded:: 19.1.0

    .. versionchanged:: 25.4.0
       *key_validator* and *value_validator* are now optional, but at least
       one of them must be provided.

    .. versionchanged:: 25.4.0
       *key_validator*, *value_validator*, and *mapping_validator* can now be
       a list or tuple of validators.

    Raises:
        TypeError: If any sub-validator fails on validation.

        ValueError:
            If neither *key_validator* nor *value_validator* is provided on
            instantiation.
    """
    if key_validator is None and value_validator is None:
        msg = (
            "At least one of key_validator or value_validator must be provided"
        )
        raise ValueError(msg)

    if isinstance(key_validator, (list, tuple)):
        key_validator = and_(*key_validator)
    if isinstance(value_validator, (list, tuple)):
        value_validator = and_(*value_validator)
    if isinstance(mapping_validator, (list, tuple)):
        mapping_validator = and_(*mapping_validator)

    return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
@attrs(repr=False, frozen=True, slots=True)
class _NumberValidator:
    bound = attrib()
    compare_op = attrib()
    compare_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.compare_func(value, self.bound):
            msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
            raise ValueError(msg)

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<Validator for x {self.compare_op} {self.bound}>"


def lt(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number larger or equal to *val*.

    The validator uses `operator.lt` to compare the values.

    Args:
        val: Exclusive upper bound for values.

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<", operator.lt)


def le(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number greater than *val*.

    The validator uses `operator.le` to compare the values.

    Args:
        val: Inclusive upper bound for values.

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, "<=", operator.le)


def ge(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number smaller than *val*.

    The validator uses `operator.ge` to compare the values.

    Args:
        val: Inclusive lower bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">=", operator.ge)


def gt(val):
    """
    A validator that raises `ValueError` if the initializer is called with a
    number smaller or equal to *val*.

    The validator uses `operator.gt` to compare the values.

    Args:
        val: Exclusive lower bound for values

    .. versionadded:: 21.3.0
    """
    return _NumberValidator(val, ">", operator.gt)
+
+
@attrs(repr=False, frozen=True, slots=True)
class _MaxLengthValidator:
    max_length = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if len(value) > self.max_length:
            msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
            raise ValueError(msg)

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<max_len validator for {self.max_length}>"


def max_len(length):
    """
    A validator that raises `ValueError` if the initializer is called
    with a string or iterable that is longer than *length*.

    Args:
        length (int): Maximum length of the string or iterable

    .. versionadded:: 21.3.0
    """
    return _MaxLengthValidator(length)
+
+
@attrs(repr=False, frozen=True, slots=True)
class _MinLengthValidator:
    min_length = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if len(value) < self.min_length:
            msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
            raise ValueError(msg)

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<min_len validator for {self.min_length}>"


def min_len(length):
    """
    A validator that raises `ValueError` if the initializer is called
    with a string or iterable that is shorter than *length*.

    Args:
        length (int): Minimum length of the string or iterable

    .. versionadded:: 22.1.0
    """
    return _MinLengthValidator(length)
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _SubclassOfValidator:
    type = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not issubclass(value, self.type):
            msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
            raise TypeError(
                msg,
                attr,
                self.type,
                value,
            )

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<subclass_of validator for type {self.type!r}>"


def _subclass_of(type):
    """
    A validator that raises a `TypeError` if the initializer is called with a
    wrong type for this particular attribute (checks are performed using
    `issubclass` therefore it's also valid to pass a tuple of types).

    Args:
        type (type | tuple[type, ...]): The type(s) to check for.

    Raises:
        TypeError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the expected type, and the value it got.
    """
    return _SubclassOfValidator(type)
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _NotValidator:
    validator = attrib()
    msg = attrib(
        converter=default_if_none(
            "not_ validator child '{validator!r}' "
            "did not raise a captured error"
        )
    )
    exc_types = attrib(
        validator=deep_iterable(
            member_validator=_subclass_of(Exception),
            iterable_validator=instance_of(tuple),
        ),
    )

    def __call__(self, inst, attr, value):
        try:
            self.validator(inst, attr, value)
        except self.exc_types:
            pass  # suppress error to invert validity
        else:
            raise ValueError(
                self.msg.format(
                    validator=self.validator,
                    exc_types=self.exc_types,
                ),
                attr,
                self.validator,
                value,
                self.exc_types,
            )

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return (
            f"<not_ validator wrapping {self.validator!r}, "
            f"capturing {self.exc_types!r}>"
        )


def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
    """
    A validator that wraps and logically 'inverts' the validator passed to it.
    It will raise a `ValueError` if the provided validator *doesn't* raise a
    `ValueError` or `TypeError` (by default), and will suppress the exception
    if the provided validator *does*.

    Intended to be used with existing validators to compose logic without
    needing to create inverted variants, for example, ``not_(in_(...))``.

    Args:
        validator: A validator to be logically inverted.

        msg (str):
            Message to raise if validator fails. Formatted with keys
            ``exc_types`` and ``validator``.

        exc_types (tuple[type, ...]):
            Exception type(s) to capture. Other types raised by child
            validators will not be intercepted and pass through.

    Raises:
        ValueError:
            With a human readable error message, the attribute (of type
            `attrs.Attribute`), the validator that failed to raise an
            exception, the value it got, and the expected exception types.

    .. versionadded:: 22.2.0
    """
    # Accept a single exception type as well as any iterable of types.
    try:
        exc_types = tuple(exc_types)
    except TypeError:
        exc_types = (exc_types,)
    return _NotValidator(validator, msg, exc_types)
+
+
@attrs(repr=False, slots=True, unsafe_hash=True)
class _OrValidator:
    validators = attrib()

    def __call__(self, inst, attr, value):
        for v in self.validators:
            try:
                v(inst, attr, value)
            except Exception:  # noqa: BLE001, PERF203, S112
                continue
            else:
                return

        msg = f"None of {self.validators!r} satisfied for value {value!r}"
        raise ValueError(msg)

    def __repr__(self):
        # Restored from upstream attrs; the text had been stripped to an
        # empty f-string.
        return f"<or validator wrapping {self.validators!r}>"


def or_(*validators):
    """
    A validator that composes multiple validators into one.

    When called on a value, it runs all wrapped validators until one of them is
    satisfied.

    Args:
        validators (~collections.abc.Iterable[typing.Callable]):
            Arbitrary number of validators.

    Raises:
        ValueError:
            If no validator is satisfied. Raised with a human-readable error
            message listing all the wrapped validators and the value that
            failed all of them.

    .. versionadded:: 24.1.0
    """
    # Flatten nested or_ validators so the repr/error lists them all.
    vals = []
    for v in validators:
        vals.extend(v.validators if isinstance(v, _OrValidator) else [v])

    return _OrValidator(tuple(vals))
diff --git a/py311/lib/python3.11/site-packages/attr/validators.pyi b/py311/lib/python3.11/site-packages/attr/validators.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..36a7e800c237f46755ddfca306f8d867a7f493db
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attr/validators.pyi
@@ -0,0 +1,140 @@
+from types import UnionType
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ ContextManager,
+ Iterable,
+ Mapping,
+ Match,
+ Pattern,
+ TypeVar,
+ overload,
+)
+
+from attrs import _ValidatorType
+from attrs import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_T4 = TypeVar("_T4")
+_T5 = TypeVar("_T5")
+_T6 = TypeVar("_T6")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+ type: tuple[type[_T1], type[_T2]],
+) -> _ValidatorType[_T1 | _T2]: ...
+@overload
+def instance_of(
+ type: tuple[type[_T1], type[_T2], type[_T3]],
+) -> _ValidatorType[_T1 | _T2 | _T3]: ...
+@overload
+def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
+@overload
+def instance_of(type: UnionType) -> _ValidatorType[Any]: ...
+def optional(
+ validator: (
+ _ValidatorType[_T]
+ | list[_ValidatorType[_T]]
+ | tuple[_ValidatorType[_T]]
+ ),
+) -> _ValidatorType[_T | None]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+ regex: Pattern[AnyStr] | AnyStr,
+ flags: int = ...,
+ func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+ member_validator: _ValidatorArgType[_T],
+ iterable_validator: _ValidatorArgType[_I] | None = ...,
+) -> _ValidatorType[_I]: ...
+@overload
+def deep_mapping(
+ key_validator: _ValidatorArgType[_K],
+ value_validator: _ValidatorArgType[_V] | None = ...,
+ mapping_validator: _ValidatorArgType[_M] | None = ...,
+) -> _ValidatorType[_M]: ...
+@overload
+def deep_mapping(
+ key_validator: _ValidatorArgType[_K] | None = ...,
+ value_validator: _ValidatorArgType[_V] = ...,
+ mapping_validator: _ValidatorArgType[_M] | None = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
+def not_(
+ validator: _ValidatorType[_T],
+ *,
+ msg: str | None = None,
+ exc_types: type[Exception] | Iterable[type[Exception]] = ...,
+) -> _ValidatorType[_T]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+) -> _ValidatorType[_T1 | _T2]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+) -> _ValidatorType[_T1 | _T2 | _T3]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+ __v4: _ValidatorType[_T4],
+) -> _ValidatorType[_T1 | _T2 | _T3 | _T4]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+ __v4: _ValidatorType[_T4],
+ __v5: _ValidatorType[_T5],
+) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[_T1],
+ __v2: _ValidatorType[_T2],
+ __v3: _ValidatorType[_T3],
+ __v4: _ValidatorType[_T4],
+ __v5: _ValidatorType[_T5],
+ __v6: _ValidatorType[_T6],
+) -> _ValidatorType[_T1 | _T2 | _T3 | _T4 | _T5 | _T6]: ...
+@overload
+def or_(
+ __v1: _ValidatorType[Any],
+ __v2: _ValidatorType[Any],
+ __v3: _ValidatorType[Any],
+ __v4: _ValidatorType[Any],
+ __v5: _ValidatorType[Any],
+ __v6: _ValidatorType[Any],
+ *validators: _ValidatorType[Any],
+) -> _ValidatorType[Any]: ...
diff --git a/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/INSTALLER b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+uv
\ No newline at end of file
diff --git a/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/METADATA b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..51128bb96f0b8fa07656422e8099343cfdadd4cd
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/METADATA
@@ -0,0 +1,235 @@
+Metadata-Version: 2.4
+Name: attrs
+Version: 25.4.0
+Summary: Classes Without Boilerplate
+Project-URL: Documentation, https://www.attrs.org/
+Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html
+Project-URL: GitHub, https://github.com/python-attrs/attrs
+Project-URL: Funding, https://github.com/sponsors/hynek
+Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi
+Author-email: Hynek Schlawack
+License-Expression: MIT
+License-File: LICENSE
+Keywords: attribute,boilerplate,class
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Typing :: Typed
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+
+
+
+
+*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)).
+Trusted by NASA for [Mars missions since 2020](https://github.com/readme/featured/nasa-ingenuity-helicopter)!
+
+Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+
+## Sponsors
+
+*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek).
+Especially those generously supporting us at the *The Organization* tier and higher:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Please consider joining them to help make attrs’s maintenance more sustainable!
+
+
+
+
+## Example
+
+*attrs* gives you a class decorator and a way to declaratively define the attributes on that class:
+
+
+
+```pycon
+>>> from attrs import asdict, define, make_class, Factory
+
+>>> @define
+... class SomeClass:
+... a_number: int = 42
+... list_of_numbers: list[int] = Factory(list)
+...
+... def hard_math(self, another_number):
+... return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+>>> sc = SomeClass(1, [1, 2, 3])
+>>> sc
+SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+>>> sc.hard_math(3)
+19
+>>> sc == SomeClass(1, [1, 2, 3])
+True
+>>> sc != SomeClass(2, [3, 2, 1])
+True
+
+>>> asdict(sc)
+{'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+>>> SomeClass()
+SomeClass(a_number=42, list_of_numbers=[])
+
+>>> C = make_class("C", ["a", "b"])
+>>> C("foo", "bar")
+C(a='foo', b='bar')
+```
+
+After *declaring* your attributes, *attrs* gives you:
+
+- a concise and explicit overview of the class's attributes,
+- a nice human-readable `__repr__`,
+- equality-checking methods,
+- an initializer,
+- and much more,
+
+*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+---
+
+This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0.
+The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**.
+
+Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation!
+
+
+### Hate Type Annotations!?
+
+No problem!
+Types are entirely **optional** with *attrs*.
+Simply assign `attrs.field()` to the attributes instead of annotating them with types:
+
+```python
+from attrs import define, field
+
+@define
+class SomeClass:
+ a_number = field(default=42)
+ list_of_numbers = field(factory=list)
+```
+
+
+## Data Classes
+
+On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*).
+In practice it does a lot more and is more flexible.
+For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger.
+
+For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice.
+
+
+## Project Information
+
+- [**Changelog**](https://www.attrs.org/en/stable/changelog.html)
+- [**Documentation**](https://www.attrs.org/)
+- [**PyPI**](https://pypi.org/project/attrs/)
+- [**Source Code**](https://github.com/python-attrs/attrs)
+- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md)
+- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs)
+- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs)
+
+
+### *attrs* for Enterprise
+
+Available as part of the [Tidelift Subscription](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek).
+
+The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
+Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
+
+## Release Information
+
+### Backwards-incompatible Changes
+
+- Class-level `kw_only=True` behavior is now consistent with `dataclasses`.
+
+ Previously, a class that sets `kw_only=True` makes all attributes keyword-only, including those from base classes.
+ If an attribute sets `kw_only=False`, that setting is ignored, and it is still made keyword-only.
+
+ Now, only the attributes defined in that class that doesn't explicitly set `kw_only=False` are made keyword-only.
+
+ This shouldn't be a problem for most users, unless you have a pattern like this:
+
+ ```python
+ @attrs.define(kw_only=True)
+ class Base:
+ a: int
+ b: int = attrs.field(default=1, kw_only=False)
+
+ @attrs.define
+ class Subclass(Base):
+ c: int
+ ```
+
+ Here, we have a `kw_only=True` *attrs* class (`Base`) with an attribute that sets `kw_only=False` and has a default (`Base.b`), and then create a subclass (`Subclass`) with required arguments (`Subclass.c`).
+ Previously this would work, since it would make `Base.b` keyword-only, but now this fails since `Base.b` is positional, and we have a required positional argument (`Subclass.c`) following another argument with defaults.
+ [#1457](https://github.com/python-attrs/attrs/issues/1457)
+
+
+### Changes
+
+- Values passed to the `__init__()` method of `attrs` classes are now correctly passed to `__attrs_pre_init__()` instead of their default values (in cases where *kw_only* was not specified).
+ [#1427](https://github.com/python-attrs/attrs/issues/1427)
+- Added support for Python 3.14 and [PEP 749](https://peps.python.org/pep-0749/).
+ [#1446](https://github.com/python-attrs/attrs/issues/1446),
+ [#1451](https://github.com/python-attrs/attrs/issues/1451)
+- `attrs.validators.deep_mapping()` now allows to leave out either *key_validator* xor *value_validator*.
+ [#1448](https://github.com/python-attrs/attrs/issues/1448)
+- `attrs.validators.deep_iterator()` and `attrs.validators.deep_mapping()` now accept lists and tuples for all validators and wrap them into a `attrs.validators.and_()`.
+ [#1449](https://github.com/python-attrs/attrs/issues/1449)
+- Added a new **experimental** way to inspect classes:
+
+ `attrs.inspect(cls)` returns the _effective_ class-wide parameters that were used by *attrs* to construct the class.
+
+ The returned class is the same data structure that *attrs* uses internally to decide how to construct the final class.
+ [#1454](https://github.com/python-attrs/attrs/issues/1454)
+- Fixed annotations for `attrs.field(converter=...)`.
+ Previously, a `tuple` of converters was only accepted if it had exactly one element.
+ [#1461](https://github.com/python-attrs/attrs/issues/1461)
+- The performance of `attrs.asdict()` has been improved by 45–260%.
+ [#1463](https://github.com/python-attrs/attrs/issues/1463)
+- The performance of `attrs.astuple()` has been improved by 49–270%.
+ [#1469](https://github.com/python-attrs/attrs/issues/1469)
+- The type annotation for `attrs.validators.or_()` now allows for different types of validators.
+
+ This was only an issue on Pyright.
+ [#1474](https://github.com/python-attrs/attrs/issues/1474)
+
+
+
+---
+
+[Full changelog →](https://www.attrs.org/en/stable/changelog.html)
diff --git a/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/RECORD b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..1a6aed1657e9b79ba62e923da5fface8a24be5e2
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/RECORD
@@ -0,0 +1,37 @@
+attr/__init__.py,sha256=fOYIvt1eGSqQre4uCS3sJWKZ0mwAuC8UD6qba5OS9_U,2057
+attr/__init__.pyi,sha256=IZkzIjvtbRqDWGkDBIF9dd12FgDa379JYq3GHnVOvFQ,11309
+attr/_cmp.py,sha256=3Nn1TjxllUYiX_nJoVnEkXoDk0hM1DYKj5DE7GZe4i0,4117
+attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368
+attr/_compat.py,sha256=x0g7iEUOnBVJC72zyFCgb1eKqyxS-7f2LGnNyZ_r95s,2829
+attr/_config.py,sha256=dGq3xR6fgZEF6UBt_L0T-eUHIB4i43kRmH0P28sJVw8,843
+attr/_funcs.py,sha256=Ix5IETTfz5F01F-12MF_CSFomIn2h8b67EVVz2gCtBE,16479
+attr/_make.py,sha256=NRJDGS8syg2h3YNflVNoK2FwR3CpdSZxx8M6lacwljA,104141
+attr/_next_gen.py,sha256=BQtCUlzwg2gWHTYXBQvrEYBnzBUrDvO57u0Py6UCPhc,26274
+attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469
+attr/_version_info.py,sha256=w4R-FYC3NK_kMkGUWJlYP4cVAlH9HRaC-um3fcjYkHM,2222
+attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
+attr/converters.py,sha256=GlDeOzPeTFgeBBLbj9G57Ez5lAk68uhSALRYJ_exe84,3861
+attr/converters.pyi,sha256=orU2bff-VjQa2kMDyvnMQV73oJT2WRyQuw4ZR1ym1bE,643
+attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977
+attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539
+attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795
+attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208
+attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attr/setters.py,sha256=5-dcT63GQK35ONEzSgfXCkbB7pPkaR-qv15mm4PVSzQ,1617
+attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584
+attr/validators.py,sha256=1BnYGTuYvSucGEI4ju-RPNJteVzG0ZlfWpJiWoSFHQ8,21458
+attr/validators.pyi,sha256=ftmW3m4KJ3pQcIXAj-BejT7BY4ZfqrC1G-5W7XvoPds,4082
+attrs-25.4.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
+attrs-25.4.0.dist-info/METADATA,sha256=2Rerxj7agcMRxiwdkt6lC2guqHAmkGKCH13nWWK7ZoQ,10473
+attrs-25.4.0.dist-info/RECORD,,
+attrs-25.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attrs-25.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+attrs-25.4.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109
+attrs/__init__.py,sha256=RxaAZNwYiEh-fcvHLZNpQ_DWKni73M_jxEPEftiq1Zc,1183
+attrs/__init__.pyi,sha256=2gV79g9UxJppGSM48hAZJ6h_MHb70dZoJL31ZNJeZYI,9416
+attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76
+attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76
+attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73
+attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73
+attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76
diff --git a/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/REQUESTED b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/WHEEL b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..12228d414b6cfed7c39d3781c85c63256a1d7fb5
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.27.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..2bd6453d255e19b973f19b128596a8b6dd65b2c3
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs-25.4.0.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack and the attrs contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/py311/lib/python3.11/site-packages/attrs/__init__.py b/py311/lib/python3.11/site-packages/attrs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc1ce4b974ea1b3971ced44e3e112e897cf29b0a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs/__init__.py
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+from attr import (
+ NOTHING,
+ Attribute,
+ AttrsInstance,
+ Converter,
+ Factory,
+ NothingType,
+ _make_getattr,
+ assoc,
+ cmp_using,
+ define,
+ evolve,
+ field,
+ fields,
+ fields_dict,
+ frozen,
+ has,
+ make_class,
+ mutable,
+ resolve_types,
+ validate,
+)
+from attr._make import ClassProps
+from attr._next_gen import asdict, astuple, inspect
+
+from . import converters, exceptions, filters, setters, validators
+
+
+__all__ = [
+ "NOTHING",
+ "Attribute",
+ "AttrsInstance",
+ "ClassProps",
+ "Converter",
+ "Factory",
+ "NothingType",
+ "__author__",
+ "__copyright__",
+ "__description__",
+ "__doc__",
+ "__email__",
+ "__license__",
+ "__title__",
+ "__url__",
+ "__version__",
+ "__version_info__",
+ "asdict",
+ "assoc",
+ "astuple",
+ "cmp_using",
+ "converters",
+ "define",
+ "evolve",
+ "exceptions",
+ "field",
+ "fields",
+ "fields_dict",
+ "filters",
+ "frozen",
+ "has",
+ "inspect",
+ "make_class",
+ "mutable",
+ "resolve_types",
+ "setters",
+ "validate",
+ "validators",
+]
+
+__getattr__ = _make_getattr(__name__)
diff --git a/py311/lib/python3.11/site-packages/attrs/__init__.pyi b/py311/lib/python3.11/site-packages/attrs/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..6364bac4ea29f860934803e5f3ba741812c6dfd1
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs/__init__.pyi
@@ -0,0 +1,314 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Mapping,
+ Sequence,
+ overload,
+ TypeVar,
+)
+
+# Because we need to type our own stuff, we have to make everything from
+# attr explicitly public too.
+from attr import __author__ as __author__
+from attr import __copyright__ as __copyright__
+from attr import __description__ as __description__
+from attr import __email__ as __email__
+from attr import __license__ as __license__
+from attr import __title__ as __title__
+from attr import __url__ as __url__
+from attr import __version__ as __version__
+from attr import __version_info__ as __version_info__
+from attr import assoc as assoc
+from attr import Attribute as Attribute
+from attr import AttrsInstance as AttrsInstance
+from attr import cmp_using as cmp_using
+from attr import converters as converters
+from attr import Converter as Converter
+from attr import evolve as evolve
+from attr import exceptions as exceptions
+from attr import Factory as Factory
+from attr import fields as fields
+from attr import fields_dict as fields_dict
+from attr import filters as filters
+from attr import has as has
+from attr import make_class as make_class
+from attr import NOTHING as NOTHING
+from attr import resolve_types as resolve_types
+from attr import setters as setters
+from attr import validate as validate
+from attr import validators as validators
+from attr import attrib, asdict as asdict, astuple as astuple
+from attr import NothingType as NothingType
+
+if sys.version_info >= (3, 11):
+ from typing import dataclass_transform
+else:
+ from typing_extensions import dataclass_transform
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = bool | Callable[[Any], Any]
+_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]
+_CallableConverterType = Callable[[Any], Any]
+_ConverterType = _CallableConverterType | Converter[Any, Any]
+_ReprType = Callable[[Any], str]
+_ReprArgType = bool | _ReprType
+_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any]
+_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType
+_FieldTransformer = Callable[
+ [type, list["Attribute[Any]"]], list["Attribute[Any]"]
+]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]]
+
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool | None = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType, ...]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType, ...]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: _T | None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType
+ | list[_ConverterType]
+ | tuple[_ConverterType, ...]
+ | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool | None = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> Any: ...
+@overload
+@dataclass_transform(field_specifiers=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(field_specifiers=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+
+@overload
+@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
+def frozen(
+ maybe_cls: _C,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
+def frozen(
+ maybe_cls: None = ...,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+class ClassProps:
+ # XXX: somehow when defining/using enums Mypy starts looking at our own
+ # (untyped) code and causes tons of errors.
+ Hashability: Any
+ KeywordOnly: Any
+
+ is_exception: bool
+ is_slotted: bool
+ has_weakref_slot: bool
+ is_frozen: bool
+ # kw_only: ClassProps.KeywordOnly
+ kw_only: Any
+ collected_fields_by_mro: bool
+ added_init: bool
+ added_repr: bool
+ added_eq: bool
+ added_ordering: bool
+ # hashability: ClassProps.Hashability
+ hashability: Any
+ added_match_args: bool
+ added_str: bool
+ added_pickling: bool
+ on_setattr_hook: _OnSetAttrType | None
+ field_transformer: Callable[[Attribute[Any]], Attribute[Any]] | None
+
+ def __init__(
+ self,
+ is_exception: bool,
+ is_slotted: bool,
+ has_weakref_slot: bool,
+ is_frozen: bool,
+ # kw_only: ClassProps.KeywordOnly
+ kw_only: Any,
+ collected_fields_by_mro: bool,
+ added_init: bool,
+ added_repr: bool,
+ added_eq: bool,
+ added_ordering: bool,
+ # hashability: ClassProps.Hashability
+ hashability: Any,
+ added_match_args: bool,
+ added_str: bool,
+ added_pickling: bool,
+ on_setattr_hook: _OnSetAttrType,
+ field_transformer: Callable[[Attribute[Any]], Attribute[Any]],
+ ) -> None: ...
+ @property
+ def is_hashable(self) -> bool: ...
+
+def inspect(cls: type) -> ClassProps: ...
diff --git a/py311/lib/python3.11/site-packages/attrs/converters.py b/py311/lib/python3.11/site-packages/attrs/converters.py
new file mode 100644
index 0000000000000000000000000000000000000000..7821f6c02cca81277d1ecc87b6bdafad886d8b70
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs/converters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.converters import * # noqa: F403
diff --git a/py311/lib/python3.11/site-packages/attrs/exceptions.py b/py311/lib/python3.11/site-packages/attrs/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..3323f9d2112c54b203763d45b455bd5abbe020f6
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs/exceptions.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.exceptions import * # noqa: F403
diff --git a/py311/lib/python3.11/site-packages/attrs/filters.py b/py311/lib/python3.11/site-packages/attrs/filters.py
new file mode 100644
index 0000000000000000000000000000000000000000..3080f48398e5ed8d3428ca3efeb7500633b0cb0f
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs/filters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.filters import * # noqa: F403
diff --git a/py311/lib/python3.11/site-packages/attrs/py.typed b/py311/lib/python3.11/site-packages/attrs/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/attrs/setters.py b/py311/lib/python3.11/site-packages/attrs/setters.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3d73bb793dd49c138950961f41943bb26c57fde
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs/setters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.setters import * # noqa: F403
diff --git a/py311/lib/python3.11/site-packages/attrs/validators.py b/py311/lib/python3.11/site-packages/attrs/validators.py
new file mode 100644
index 0000000000000000000000000000000000000000..037e124f29f32d37c1642d159bf828de44f7c349
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/attrs/validators.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.validators import * # noqa: F403
diff --git a/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/INSTALLER b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+uv
\ No newline at end of file
diff --git a/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/LICENSE b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..9dc3cea50bac35595e74acd86a29fcd69bb336e0
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 litl, LLC.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/METADATA b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..adcb9c7ca4b5f4641f537fd925e714c8ab8bb20c
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/METADATA
@@ -0,0 +1,419 @@
+Metadata-Version: 2.1
+Name: backoff
+Version: 2.2.1
+Summary: Function decoration for backoff and retry
+Home-page: https://github.com/litl/backoff
+License: MIT
+Keywords: retry,backoff,decorators
+Author: Bob Green
+Author-email: rgreen@aquent.com
+Requires-Python: >=3.7,<4.0
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Project-URL: Repository, https://github.com/litl/backoff
+Description-Content-Type: text/x-rst
+
+backoff
+=======
+
+.. image:: https://travis-ci.org/litl/backoff.svg
+ :target: https://travis-ci.org/litl/backoff
+.. image:: https://coveralls.io/repos/litl/backoff/badge.svg
+ :target: https://coveralls.io/r/litl/backoff?branch=python-3
+.. image:: https://github.com/litl/backoff/workflows/CodeQL/badge.svg
+ :target: https://github.com/litl/backoff/actions/workflows/codeql-analysis.yml
+.. image:: https://img.shields.io/pypi/v/backoff.svg
+ :target: https://pypi.python.org/pypi/backoff
+.. image:: https://img.shields.io/github/license/litl/backoff
+ :target: https://github.com/litl/backoff/blob/master/LICENSE
+
+**Function decoration for backoff and retry**
+
+This module provides function decorators which can be used to wrap a
+function such that it will be retried until some condition is met. It
+is meant to be of use when accessing unreliable resources with the
+potential for intermittent failures i.e. network resources and external
+APIs. Somewhat more generally, it may also be of use for dynamically
+polling resources for externally generated content.
+
+Decorators support both regular functions for synchronous code and
+`asyncio <https://docs.python.org/3/library/asyncio.html>`__'s coroutines
+for asynchronous code.
+
+Examples
+========
+
+Since Kenneth Reitz's `requests <https://requests.readthedocs.io/>`_ module
+has become a de facto standard for synchronous HTTP clients in Python,
+networking examples below are written using it, but it is in no way required
+by the backoff module.
+
+@backoff.on_exception
+---------------------
+
+The ``on_exception`` decorator is used to retry when a specified exception
+is raised. Here's an example using exponential backoff when any
+``requests`` exception is raised:
+
+.. code-block:: python
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException)
+ def get_url(url):
+ return requests.get(url)
+
+The decorator will also accept a tuple of exceptions for cases where
+the same backoff behavior is desired for more than one exception type:
+
+.. code-block:: python
+
+ @backoff.on_exception(backoff.expo,
+ (requests.exceptions.Timeout,
+ requests.exceptions.ConnectionError))
+ def get_url(url):
+ return requests.get(url)
+
+**Give Up Conditions**
+
+Optional keyword arguments can specify conditions under which to give
+up.
+
+The keyword argument ``max_time`` specifies the maximum amount
+of total time in seconds that can elapse before giving up.
+
+.. code-block:: python
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ max_time=60)
+ def get_url(url):
+ return requests.get(url)
+
+
+Keyword argument ``max_tries`` specifies the maximum number of calls
+to make to the target function before giving up.
+
+.. code-block:: python
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ max_tries=8,
+ jitter=None)
+ def get_url(url):
+ return requests.get(url)
+
+
+In some cases the raised exception instance itself may need to be
+inspected in order to determine if it is a retryable condition. The
+``giveup`` keyword arg can be used to specify a function which accepts
+the exception and returns a truthy value if the exception should not
+be retried:
+
+.. code-block:: python
+
+ def fatal_code(e):
+ return 400 <= e.response.status_code < 500
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ max_time=300,
+ giveup=fatal_code)
+ def get_url(url):
+ return requests.get(url)
+
+By default, when a give up event occurs, the exception in question is reraised
+and so code calling an `on_exception`-decorated function may still
+need to do exception handling. This behavior can optionally be disabled
+using the `raise_on_giveup` keyword argument.
+
+In the code below, `requests.exceptions.RequestException` will not be raised
+when giveup occurs. Note that the decorated function will return `None` in this
+case, regardless of the logic in the `on_exception` handler.
+
+.. code-block:: python
+
+ def fatal_code(e):
+ return 400 <= e.response.status_code < 500
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ max_time=300,
+ raise_on_giveup=False,
+ giveup=fatal_code)
+ def get_url(url):
+ return requests.get(url)
+
+This is useful for non-mission critical code where you still wish to retry
+the code inside of `backoff.on_exception` but wish to proceed with execution
+even if all retries fail.
+
+@backoff.on_predicate
+---------------------
+
+The ``on_predicate`` decorator is used to retry when a particular
+condition is true of the return value of the target function. This may
+be useful when polling a resource for externally generated content.
+
+Here's an example which uses a fibonacci sequence backoff when the
+return value of the target function is the empty list:
+
+.. code-block:: python
+
+ @backoff.on_predicate(backoff.fibo, lambda x: x == [], max_value=13)
+ def poll_for_messages(queue):
+ return queue.get()
+
+Extra keyword arguments are passed when initializing the
+wait generator, so the ``max_value`` param above is passed as a keyword
+arg when initializing the fibo generator.
+
+When not specified, the predicate param defaults to the falsey test,
+so the above can more concisely be written:
+
+.. code-block:: python
+
+ @backoff.on_predicate(backoff.fibo, max_value=13)
+ def poll_for_message(queue):
+ return queue.get()
+
+More simply, a function which continues polling every second until it
+gets a non-falsey result could be defined like this:
+
+.. code-block:: python
+
+ @backoff.on_predicate(backoff.constant, jitter=None, interval=1)
+ def poll_for_message(queue):
+ return queue.get()
+
+The jitter is disabled in order to keep the polling frequency fixed.
+
+@backoff.runtime
+----------------
+
+You can also use the ``backoff.runtime`` generator to make use of the
+return value or thrown exception of the decorated method.
+
+For example, to use the value in the ``Retry-After`` header of the response:
+
+.. code-block:: python
+
+ @backoff.on_predicate(
+ backoff.runtime,
+ predicate=lambda r: r.status_code == 429,
+ value=lambda r: int(r.headers.get("Retry-After")),
+ jitter=None,
+ )
+ def get_url():
+ return requests.get(url)
+
+Jitter
+------
+
+A jitter algorithm can be supplied with the ``jitter`` keyword arg to
+either of the backoff decorators. This argument should be a function
+accepting the original unadulterated backoff value and returning its
+jittered counterpart.
+
+As of version 1.2, the default jitter function ``backoff.full_jitter``
+implements the 'Full Jitter' algorithm as defined in the AWS
+Architecture Blog's `Exponential Backoff And Jitter
+<https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/>`_ post.
+Note that with this algorithm, the time yielded by the wait generator
+is actually the *maximum* amount of time to wait.
+
+Previous versions of backoff defaulted to adding some random number of
+milliseconds (up to 1s) to the raw sleep value. If desired, this
+behavior is now available as ``backoff.random_jitter``.
+
+Using multiple decorators
+-------------------------
+
+The backoff decorators may also be combined to specify different
+backoff behavior for different cases:
+
+.. code-block:: python
+
+ @backoff.on_predicate(backoff.fibo, max_value=13)
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.HTTPError,
+ max_time=60)
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.Timeout,
+ max_time=300)
+ def poll_for_message(queue):
+ return queue.get()
+
+
+Runtime Configuration
+---------------------
+
+The decorator functions ``on_exception`` and ``on_predicate`` are
+generally evaluated at import time. This is fine when the keyword args
+are passed as constant values, but suppose we want to consult a
+dictionary with configuration options that only become available at
+runtime. The relevant values are not available at import time. Instead,
+decorator functions can be passed callables which are evaluated at
+runtime to obtain the value:
+
+.. code-block:: python
+
+ def lookup_max_time():
+ # pretend we have a global reference to 'app' here
+ # and that it has a dictionary-like 'config' property
+ return app.config["BACKOFF_MAX_TIME"]
+
+ @backoff.on_exception(backoff.expo,
+ ValueError,
+ max_time=lookup_max_time)
+
+Event handlers
+--------------
+
+Both backoff decorators optionally accept event handler functions
+using the keyword arguments ``on_success``, ``on_backoff``, and ``on_giveup``.
+This may be useful in reporting statistics or performing other custom
+logging.
+
+Handlers must be callables with a unary signature accepting a dict
+argument. This dict contains the details of the invocation. Valid keys
+include:
+
+* *target*: reference to the function or method being invoked
+* *args*: positional arguments to func
+* *kwargs*: keyword arguments to func
+* *tries*: number of invocation tries so far
+* *elapsed*: elapsed time in seconds so far
+* *wait*: seconds to wait (``on_backoff`` handler only)
+* *value*: value triggering backoff (``on_predicate`` decorator only)
+
+A handler which prints the details of the backoff event could be
+implemented like so:
+
+.. code-block:: python
+
+ def backoff_hdlr(details):
+ print ("Backing off {wait:0.1f} seconds after {tries} tries "
+ "calling function {target} with args {args} and kwargs "
+ "{kwargs}".format(**details))
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ on_backoff=backoff_hdlr)
+ def get_url(url):
+ return requests.get(url)
+
+**Multiple handlers per event type**
+
+In all cases, iterables of handler functions are also accepted, which
+are called in turn. For example, you might provide a simple list of
+handler functions as the value of the ``on_backoff`` keyword arg:
+
+.. code-block:: python
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ on_backoff=[backoff_hdlr1, backoff_hdlr2])
+ def get_url(url):
+ return requests.get(url)
+
+**Getting exception info**
+
+In the case of the ``on_exception`` decorator, all ``on_backoff`` and
+``on_giveup`` handlers are called from within the except block for the
+exception being handled. Therefore exception info is available to the
+handler functions via the python standard library, specifically
+``sys.exc_info()`` or the ``traceback`` module. The exception is also
+available at the *exception* key in the `details` dict passed to the
+handlers.
+
+Asynchronous code
+-----------------
+
+Backoff supports asynchronous execution in Python 3.5 and above.
+
+To use backoff in asynchronous code based on
+`asyncio <https://docs.python.org/3/library/asyncio.html>`__
+you simply need to apply ``backoff.on_exception`` or ``backoff.on_predicate``
+to coroutines.
+You can also use coroutines for the ``on_success``, ``on_backoff``, and
+``on_giveup`` event handlers, with the interface otherwise being identical.
+
+The following examples use `aiohttp <https://docs.aiohttp.org/>`__
+asynchronous HTTP client/server library.
+
+.. code-block:: python
+
+ @backoff.on_exception(backoff.expo, aiohttp.ClientError, max_time=60)
+ async def get_url(url):
+ async with aiohttp.ClientSession(raise_for_status=True) as session:
+ async with session.get(url) as response:
+ return await response.text()
+
+Logging configuration
+---------------------
+
+By default, backoff and retry attempts are logged to the 'backoff'
+logger. By default, this logger is configured with a NullHandler, so
+there will be nothing output unless you configure a handler.
+Programmatically, this might be accomplished with something as simple
+as:
+
+.. code-block:: python
+
+ logging.getLogger('backoff').addHandler(logging.StreamHandler())
+
+The default logging level is INFO, which corresponds to logging
+anytime a retry event occurs. If you would instead like to log
+only when a giveup event occurs, set the logger level to ERROR.
+
+.. code-block:: python
+
+ logging.getLogger('backoff').setLevel(logging.ERROR)
+
+It is also possible to specify an alternate logger with the ``logger``
+keyword argument. If a string value is specified the logger will be
+looked up by name.
+
+.. code-block:: python
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ logger='my_logger')
+ # ...
+
+It is also supported to specify a Logger (or LoggerAdapter) object
+directly.
+
+.. code-block:: python
+
+ my_logger = logging.getLogger('my_logger')
+ my_handler = logging.StreamHandler()
+ my_logger.addHandler(my_handler)
+ my_logger.setLevel(logging.ERROR)
+
+ @backoff.on_exception(backoff.expo,
+ requests.exceptions.RequestException,
+ logger=my_logger)
+ # ...
+
+Default logging can be disabled altogether by specifying
+``logger=None``. In this case, if desired alternative logging behavior
+could be defined by using custom event handlers.
+
diff --git a/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/RECORD b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..51a2ed6e0cfc55fd74e678721c5526e8d8837bdd
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/RECORD
@@ -0,0 +1,16 @@
+backoff-2.2.1.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
+backoff-2.2.1.dist-info/LICENSE,sha256=KmtNX4hNTXob8E6n3xlEzxKzLjWnmobQoHWi0_QPuaw,1077
+backoff-2.2.1.dist-info/METADATA,sha256=Wgffksy-dcDJ4GaoqXyjc8XxrE0DQz3FbWwmrDqo-6U,14827
+backoff-2.2.1.dist-info/RECORD,,
+backoff-2.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backoff-2.2.1.dist-info/WHEEL,sha256=gSF7fibx4crkLz_A-IKR6kcuq0jJ64KNCkG8_bcaEao,88
+backoff/__init__.py,sha256=Jl49Ur_5GTiySyaw8URBXlfClWn0H7Pk5P95m1awNZ8,898
+backoff/_async.py,sha256=ZvqmfxxQ2o-UjQUQin12Ojc4eXOXb43RWSQaPaqbALI,6775
+backoff/_common.py,sha256=8s3_5AJH8hiHd9GR2PdKqiaeE2sdEUoyf6cW9OCo1F8,3478
+backoff/_decorator.py,sha256=EuYHrg8rSPaKJ_KeZ99WEg9knrfyn_-ck12Cwfcb68U,9804
+backoff/_jitter.py,sha256=LjJShpjryk9sWBCWiz-3UX1DJCx6rebNJ5Bf3nPMlYQ,782
+backoff/_sync.py,sha256=DT_ktufPPb0nut9WCAKe5UE7sTYRl9f93PRPX6jP8ro,4214
+backoff/_typing.py,sha256=RrJ50kqdeNZvSmoMjNoAUpMCm44V8qreAVgGb-KMl-g,1328
+backoff/_wait_gen.py,sha256=U5AR3Isf4aZs2SC0x9PGI3Wh8JR7XGkV90SiU56wWvw,2396
+backoff/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+backoff/types.py,sha256=4DGG6Ltcz0wVfXrk0YBOnp_oPpcki4c0BOnodRhgoqg,73
diff --git a/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/REQUESTED b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/WHEEL b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..5430e92e31e9ed6f3bd749246e812f6295b9e168
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/backoff-2.2.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: poetry-core 1.2.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/py311/lib/python3.11/site-packages/backoff/_sync.py b/py311/lib/python3.11/site-packages/backoff/_sync.py
new file mode 100644
index 0000000000000000000000000000000000000000..4371e1680a78ed73dd31f2f30daf79799b27dc44
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/backoff/_sync.py
@@ -0,0 +1,132 @@
+# coding:utf-8
+import datetime
+import functools
+import time
+from datetime import timedelta
+
+from backoff._common import (_init_wait_gen, _maybe_call, _next_wait)
+
+
+def _call_handlers(hdlrs, target, args, kwargs, tries, elapsed, **extra):  # fan one backoff event out to every handler
+    details = {
+        'target': target,  # the function being retried
+        'args': args,  # positional args of the invocation
+        'kwargs': kwargs,  # keyword args of the invocation
+        'tries': tries,  # attempts made so far
+        'elapsed': elapsed,  # seconds since the first attempt
+    }
+    details.update(extra)  # event-specific keys: 'wait', 'value', 'exception' (see call sites below)
+    for hdlr in hdlrs:
+        hdlr(details)  # each handler takes the single details dict
+
+
+def retry_predicate(target, wait_gen, predicate,  # retries while predicate(result) is truthy
+                    *,
+                    max_tries, max_time, jitter,
+                    on_success, on_backoff, on_giveup,
+                    wait_gen_kwargs):
+
+    @functools.wraps(target)
+    def retry(*args, **kwargs):
+        max_tries_value = _maybe_call(max_tries)  # limits may be callables evaluated at call time
+        max_time_value = _maybe_call(max_time)
+
+        tries = 0  # attempts made so far
+        start = datetime.datetime.now()  # NOTE(review): naive local time; only used for a wall-clock delta
+        wait = _init_wait_gen(wait_gen, wait_gen_kwargs)  # fresh wait generator per decorated call
+        while True:
+            tries += 1
+            elapsed = timedelta.total_seconds(datetime.datetime.now() - start)  # seconds since first attempt
+            details = {
+                "target": target,
+                "args": args,
+                "kwargs": kwargs,
+                "tries": tries,
+                "elapsed": elapsed,
+            }
+
+            ret = target(*args, **kwargs)
+            if predicate(ret):  # truthy predicate means "retry needed"
+                max_tries_exceeded = (tries == max_tries_value)  # == also handles max_tries=None (int never equals None)
+                max_time_exceeded = (max_time_value is not None and
+                                     elapsed >= max_time_value)
+
+                if max_tries_exceeded or max_time_exceeded:
+                    _call_handlers(on_giveup, **details, value=ret)  # give up; falls through to return last value
+                    break
+
+                try:
+                    seconds = _next_wait(wait, ret, jitter, elapsed,
+                                         max_time_value)
+                except StopIteration:  # wait generator exhausted -> give up
+                    _call_handlers(on_giveup, **details)
+                    break
+
+                _call_handlers(on_backoff, **details,
+                               value=ret, wait=seconds)
+
+                time.sleep(seconds)  # blocking sleep between attempts (sync variant)
+                continue
+            else:
+                _call_handlers(on_success, **details, value=ret)  # predicate false -> success
+                break
+
+        return ret  # last value from target, even when giving up
+
+    return retry
+
+
+def retry_exception(target, wait_gen, exception,  # retries when *exception* is raised by target
+                    *,
+                    max_tries, max_time, jitter, giveup,
+                    on_success, on_backoff, on_giveup, raise_on_giveup,
+                    wait_gen_kwargs):
+
+    @functools.wraps(target)
+    def retry(*args, **kwargs):
+        max_tries_value = _maybe_call(max_tries)  # limits may be callables evaluated at call time
+        max_time_value = _maybe_call(max_time)
+
+        tries = 0  # attempts made so far
+        start = datetime.datetime.now()  # NOTE(review): naive local time; only used for a wall-clock delta
+        wait = _init_wait_gen(wait_gen, wait_gen_kwargs)  # fresh wait generator per decorated call
+        while True:
+            tries += 1
+            elapsed = timedelta.total_seconds(datetime.datetime.now() - start)  # seconds since first attempt
+            details = {
+                "target": target,
+                "args": args,
+                "kwargs": kwargs,
+                "tries": tries,
+                "elapsed": elapsed,
+            }
+
+            try:
+                ret = target(*args, **kwargs)
+            except exception as e:  # only the configured exception type(s) trigger retry
+                max_tries_exceeded = (tries == max_tries_value)  # == also handles max_tries=None (int never equals None)
+                max_time_exceeded = (max_time_value is not None and
+                                     elapsed >= max_time_value)
+
+                if giveup(e) or max_tries_exceeded or max_time_exceeded:
+                    _call_handlers(on_giveup, **details, exception=e)
+                    if raise_on_giveup:
+                        raise  # re-raise the exception currently being handled
+                    return None  # swallow the exception when raise_on_giveup is False
+
+                try:
+                    seconds = _next_wait(wait, e, jitter, elapsed,
+                                         max_time_value)
+                except StopIteration:  # wait generator exhausted -> give up
+                    _call_handlers(on_giveup, **details, exception=e)
+                    raise e
+
+                _call_handlers(on_backoff, **details, wait=seconds,
+                               exception=e)
+
+                time.sleep(seconds)  # blocking sleep between attempts (sync variant)
+            else:
+                _call_handlers(on_success, **details)  # success events carry no 'value' key here
+
+            return ret
+    return retry
diff --git a/py311/lib/python3.11/site-packages/black/__init__.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/__init__.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..32a68108842c418a008f25712a46e2a8f1fcc496
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/__init__.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/__init__.py b/py311/lib/python3.11/site-packages/black/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..180f5883b1d85710ff73ea3702676f15d9eb9cec
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/__init__.py
@@ -0,0 +1,1654 @@
+import io
+import json
+import platform
+import re
+import sys
+import tokenize
+import traceback
+from collections.abc import (
+ Collection,
+ Generator,
+ MutableMapping,
+ Sequence,
+)
+from contextlib import nullcontext
+from dataclasses import replace
+from datetime import datetime, timezone
+from enum import Enum
+from json.decoder import JSONDecodeError
+from pathlib import Path
+from re import Pattern
+from typing import Any
+
+import click
+from click.core import ParameterSource
+from mypy_extensions import mypyc_attr
+from pathspec import PathSpec
+from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
+
+from _black_version import version as __version__
+from black.cache import Cache
+from black.comments import normalize_fmt_off
+from black.const import (
+ DEFAULT_EXCLUDES,
+ DEFAULT_INCLUDES,
+ DEFAULT_LINE_LENGTH,
+ STDIN_PLACEHOLDER,
+)
+from black.files import (
+ best_effort_relative_path,
+ find_project_root,
+ find_pyproject_toml,
+ find_user_pyproject_toml,
+ gen_python_files,
+ get_gitignore,
+ parse_pyproject_toml,
+ path_is_excluded,
+ resolves_outside_root_or_cannot_stat,
+ wrap_stream_for_windows,
+)
+from black.handle_ipynb_magics import (
+ PYTHON_CELL_MAGICS,
+ jupyter_dependencies_are_installed,
+ mask_cell,
+ put_trailing_semicolon_back,
+ remove_trailing_semicolon,
+ unmask_cell,
+ validate_cell,
+)
+from black.linegen import LN, LineGenerator, transform_line
+from black.lines import EmptyLineTracker, LinesBlock
+from black.mode import FUTURE_FLAG_TO_FEATURE, VERSION_TO_FEATURES, Feature
+from black.mode import Mode as Mode # re-exported
+from black.mode import Preview, TargetVersion, supports_feature
+from black.nodes import STARS, is_number_token, is_simple_decorator_expression, syms
+from black.output import color_diff, diff, dump_to_file, err, ipynb_diff, out
+from black.parsing import ( # noqa F401
+ ASTSafetyError,
+ InvalidInput,
+ lib2to3_parse,
+ parse_ast,
+ stringify_ast,
+)
+from black.ranges import (
+ adjusted_lines,
+ convert_unchanged_lines,
+ parse_line_ranges,
+ sanitized_lines,
+)
+from black.report import Changed, NothingChanged, Report
+from blib2to3.pgen2 import token
+from blib2to3.pytree import Leaf, Node
+
+COMPILED = Path(__file__).suffix in (".pyd", ".so")  # True when running as a compiled extension module
+
+# types: simple aliases used for readability in signatures
+FileContent = str
+Encoding = str
+NewLine = str
+
+
+class WriteBack(Enum):  # write-back mode derived from the --check/--diff/--color CLI flags
+    NO = 0
+    YES = 1
+    DIFF = 2
+    CHECK = 3
+    COLOR_DIFF = 4
+
+    @classmethod
+    def from_configuration(
+        cls, *, check: bool, diff: bool, color: bool = False
+    ) -> "WriteBack":
+        if check and not diff:  # --check alone -> report-only mode
+            return cls.CHECK
+
+        if diff and color:  # colored diff requires both flags
+            return cls.COLOR_DIFF
+
+        return cls.DIFF if diff else cls.YES  # default (no flags) is YES
+
+
+# Legacy name, left for integrations.
+FileMode = Mode  # backward-compatible alias; prefer Mode in new code
+
+
+def read_pyproject_toml(
+    ctx: click.Context, param: click.Parameter, value: str | None
+) -> str | None:
+    """Inject Black configuration from "pyproject.toml" into defaults in `ctx`.
+
+    Returns the path to a successfully found and read configuration file, None
+    otherwise.
+    """
+    if not value:
+        value = find_pyproject_toml(
+            ctx.params.get("src", ()), ctx.params.get("stdin_filename", None)
+        )
+        if value is None:  # no pyproject.toml discovered for the given sources
+            return None
+
+    try:
+        config = parse_pyproject_toml(value)
+    except (OSError, ValueError) as e:
+        raise click.FileError(
+            filename=value, hint=f"Error reading configuration file: {e}"
+        ) from None
+
+    if not config:
+        return None
+    else:
+        spellcheck_pyproject_toml_keys(ctx, list(config), value)  # warn about unrecognized keys
+        # Sanitize the values to be Click friendly. For more information please see:
+        # https://github.com/psf/black/issues/1458
+        # https://github.com/pallets/click/issues/1567
+        config = {
+            k: str(v) if not isinstance(v, (list, dict)) else v
+            for k, v in config.items()
+        }
+
+    target_version = config.get("target_version")
+    if target_version is not None and not isinstance(target_version, list):
+        raise click.BadOptionUsage(
+            "target-version", "Config key target-version must be a list"
+        )
+
+    exclude = config.get("exclude")
+    if exclude is not None and not isinstance(exclude, str):
+        raise click.BadOptionUsage("exclude", "Config key exclude must be a string")
+
+    extend_exclude = config.get("extend_exclude")
+    if extend_exclude is not None and not isinstance(extend_exclude, str):
+        raise click.BadOptionUsage(
+            "extend-exclude", "Config key extend-exclude must be a string"
+        )
+
+    line_ranges = config.get("line_ranges")
+    if line_ranges is not None:  # rejected here: line ranges are CLI-only per the message below
+        raise click.BadOptionUsage(
+            "line-ranges", "Cannot use line-ranges in the pyproject.toml file."
+        )
+
+    default_map: dict[str, Any] = {}
+    if ctx.default_map:
+        default_map.update(ctx.default_map)
+    default_map.update(config)  # file config overrides any pre-existing defaults
+
+    ctx.default_map = default_map
+    return value
+
+
+def spellcheck_pyproject_toml_keys(  # warn (in red) about config keys that match no CLI option
+    ctx: click.Context, config_keys: list[str], config_file_path: str
+) -> None:
+    invalid_keys: list[str] = []  # NOTE(review): redundant initialization — reassigned two lines below
+    available_config_options = {param.name for param in ctx.command.params}
+    invalid_keys = [key for key in config_keys if key not in available_config_options]
+    if invalid_keys:
+        keys_str = ", ".join(map(repr, invalid_keys))
+        out(
+            f"Invalid config keys detected: {keys_str} (in {config_file_path})",
+            fg="red",
+        )
+
+
+def target_version_option_callback(
+    c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
+) -> list[TargetVersion]:
+    """Compute the target versions from a --target-version flag.
+
+    This is its own function because mypy couldn't infer the type correctly
+    when it was a lambda, causing mypyc trouble.
+    """
+    return [TargetVersion[val.upper()] for val in v]  # CLI names are lowercase; enum members are uppercase
+
+
+def enable_unstable_feature_callback(
+    c: click.Context, p: click.Option | click.Parameter, v: tuple[str, ...]
+) -> list[Preview]:
+    """Compute the features from an --enable-unstable-feature flag."""
+    return [Preview[val] for val in v]  # KeyError prevented upstream by click.Choice validation
+
+
+def re_compile_maybe_verbose(regex: str) -> Pattern[str]:
+    """Compile a regular expression string in `regex`.
+
+    If it contains newlines, use verbose mode.
+    """
+    if "\n" in regex:
+        regex = "(?x)" + regex  # (?x) == re.VERBOSE: unescaped whitespace and # comments ignored
+    compiled: Pattern[str] = re.compile(regex)
+    return compiled
+
+
+def validate_regex(  # click callback: compile the user-supplied pattern or fail cleanly
+    ctx: click.Context,
+    param: click.Parameter,
+    value: str | None,
+) -> Pattern[str] | None:
+    try:
+        return re_compile_maybe_verbose(value) if value is not None else None
+    except re.error as e:
+        raise click.BadParameter(f"Not a valid regular expression: {e}") from None  # hide the re.error chain; message carries it
+
+
+@click.command(
+ context_settings={"help_option_names": ["-h", "--help"]},
+ # While Click does set this field automatically using the docstring, mypyc
+ # (annoyingly) strips 'em so we need to set it here too.
+ help="The uncompromising code formatter.",
+)
+@click.option("-c", "--code", type=str, help="Format the code passed in as a string.")
+@click.option(
+ "-l",
+ "--line-length",
+ type=int,
+ default=DEFAULT_LINE_LENGTH,
+ help="How many characters per line to allow.",
+ show_default=True,
+)
+@click.option(
+ "-t",
+ "--target-version",
+ type=click.Choice([v.name.lower() for v in TargetVersion]),
+ callback=target_version_option_callback,
+ multiple=True,
+ help=(
+ "Python versions that should be supported by Black's output. You should"
+ " include all versions that your code supports. By default, Black will infer"
+ " target versions from the project metadata in pyproject.toml. If this does"
+ " not yield conclusive results, Black will use per-file auto-detection."
+ ),
+)
+@click.option(
+ "--pyi",
+ is_flag=True,
+ help=(
+ "Format all input files like typing stubs regardless of file extension. This"
+ " is useful when piping source on standard input."
+ ),
+)
+@click.option(
+ "--ipynb",
+ is_flag=True,
+ help=(
+ "Format all input files like Jupyter Notebooks regardless of file extension."
+ " This is useful when piping source on standard input."
+ ),
+)
+@click.option(
+ "--python-cell-magics",
+ multiple=True,
+ help=(
+ "When processing Jupyter Notebooks, add the given magic to the list"
+ f" of known python-magics ({', '.join(sorted(PYTHON_CELL_MAGICS))})."
+ " Useful for formatting cells with custom python magics."
+ ),
+ default=[],
+)
+@click.option(
+ "-x",
+ "--skip-source-first-line",
+ is_flag=True,
+ help="Skip the first line of the source code.",
+)
+@click.option(
+ "-S",
+ "--skip-string-normalization",
+ is_flag=True,
+ help="Don't normalize string quotes or prefixes.",
+)
+@click.option(
+ "-C",
+ "--skip-magic-trailing-comma",
+ is_flag=True,
+ help="Don't use trailing commas as a reason to split lines.",
+)
+@click.option(
+ "--preview",
+ is_flag=True,
+ help=(
+ "Enable potentially disruptive style changes that may be added to Black's main"
+ " functionality in the next major release."
+ ),
+)
+@click.option(
+ "--unstable",
+ is_flag=True,
+ help=(
+ "Enable potentially disruptive style changes that have known bugs or are not"
+ " currently expected to make it into the stable style Black's next major"
+ " release. Implies --preview."
+ ),
+)
+@click.option(
+ "--enable-unstable-feature",
+ type=click.Choice([v.name for v in Preview]),
+ callback=enable_unstable_feature_callback,
+ multiple=True,
+ help=(
+ "Enable specific features included in the `--unstable` style. Requires"
+ " `--preview`. No compatibility guarantees are provided on the behavior"
+ " or existence of any unstable features."
+ ),
+)
+@click.option(
+ "--check",
+ is_flag=True,
+ help=(
+ "Don't write the files back, just return the status. Return code 0 means"
+ " nothing would change. Return code 1 means some files would be reformatted."
+ " Return code 123 means there was an internal error."
+ ),
+)
+@click.option(
+ "--diff",
+ is_flag=True,
+ help=(
+ "Don't write the files back, just output a diff to indicate what changes"
+ " Black would've made. They are printed to stdout so capturing them is simple."
+ ),
+)
+@click.option(
+ "--color/--no-color",
+ is_flag=True,
+ help="Show (or do not show) colored diff. Only applies when --diff is given.",
+)
+@click.option(
+ "--line-ranges",
+ multiple=True,
+ metavar="START-END",
+ help=(
+ "When specified, Black will try its best to only format these lines. This"
+ " option can be specified multiple times, and a union of the lines will be"
+ " formatted. Each range must be specified as two integers connected by a `-`:"
+ " `-`. The `` and `` integer indices are 1-based and"
+ " inclusive on both ends."
+ ),
+ default=(),
+)
+@click.option(
+ "--fast/--safe",
+ is_flag=True,
+ help=(
+ "By default, Black performs an AST safety check after formatting your code."
+ " The --fast flag turns off this check and the --safe flag explicitly enables"
+ " it. [default: --safe]"
+ ),
+)
+@click.option(
+ "--required-version",
+ type=str,
+ help=(
+ "Require a specific version of Black to be running. This is useful for"
+ " ensuring that all contributors to your project are using the same"
+ " version, because different versions of Black may format code a little"
+ " differently. This option can be set in a configuration file for consistent"
+ " results across environments."
+ ),
+)
+@click.option(
+ "--exclude",
+ type=str,
+ callback=validate_regex,
+ help=(
+ "A regular expression that matches files and directories that should be"
+ " excluded on recursive searches. An empty value means no paths are excluded."
+ " Use forward slashes for directories on all platforms (Windows, too)."
+ " By default, Black also ignores all paths listed in .gitignore. Changing this"
+ f" value will override all default exclusions. [default: {DEFAULT_EXCLUDES}]"
+ ),
+ show_default=False,
+)
+@click.option(
+ "--extend-exclude",
+ type=str,
+ callback=validate_regex,
+ help=(
+ "Like --exclude, but adds additional files and directories on top of the"
+ " default values instead of overriding them."
+ ),
+)
+@click.option(
+ "--force-exclude",
+ type=str,
+ callback=validate_regex,
+ help=(
+ "Like --exclude, but files and directories matching this regex will be excluded"
+ " even when they are passed explicitly as arguments. This is useful when"
+ " invoking Black programmatically on changed files, such as in a pre-commit"
+ " hook or editor plugin."
+ ),
+)
+@click.option(
+ "--stdin-filename",
+ type=str,
+ is_eager=True,
+ help=(
+ "The name of the file when passing it through stdin. Useful to make sure Black"
+ " will respect the --force-exclude option on some editors that rely on using"
+ " stdin."
+ ),
+)
+@click.option(
+ "--include",
+ type=str,
+ default=DEFAULT_INCLUDES,
+ callback=validate_regex,
+ help=(
+ "A regular expression that matches files and directories that should be"
+ " included on recursive searches. An empty value means all files are included"
+ " regardless of the name. Use forward slashes for directories on all platforms"
+ " (Windows, too). Overrides all exclusions, including from .gitignore and"
+ " command line options."
+ ),
+ show_default=True,
+)
+@click.option(
+ "-W",
+ "--workers",
+ type=click.IntRange(min=1),
+ default=None,
+ help=(
+ "When Black formats multiple files, it may use a process pool to speed up"
+ " formatting. This option controls the number of parallel workers. This can"
+ " also be specified via the BLACK_NUM_WORKERS environment variable. Defaults"
+ " to the number of CPUs in the system."
+ ),
+)
+@click.option(
+ "-q",
+ "--quiet",
+ is_flag=True,
+ help=(
+ "Stop emitting all non-critical output. Error messages will still be emitted"
+ " (which can silenced by 2>/dev/null)."
+ ),
+)
+@click.option(
+ "-v",
+ "--verbose",
+ is_flag=True,
+ help=(
+ "Emit messages about files that were not changed or were ignored due to"
+ " exclusion patterns. If Black is using a configuration file, a message"
+ " detailing which one it is using will be emitted."
+ ),
+)
+@click.version_option(
+ version=__version__,
+ message=(
+ f"%(prog)s, %(version)s (compiled: {'yes' if COMPILED else 'no'})\n"
+ f"Python ({platform.python_implementation()}) {platform.python_version()}"
+ ),
+)
+@click.argument(
+ "src",
+ nargs=-1,
+ type=click.Path(
+ exists=True, file_okay=True, dir_okay=True, readable=True, allow_dash=True
+ ),
+ is_eager=True,
+ metavar="SRC ...",
+)
+@click.option(
+ "--config",
+ type=click.Path(
+ exists=True,
+ file_okay=True,
+ dir_okay=False,
+ readable=True,
+ allow_dash=False,
+ path_type=str,
+ ),
+ is_eager=True,
+ callback=read_pyproject_toml,
+ help="Read configuration options from a configuration file.",
+)
+@click.option(
+ "--no-cache",
+ is_flag=True,
+ help=(
+ "Skip reading and writing the cache, forcing Black to reformat all"
+ " included files."
+ ),
+)
+@click.pass_context
def main(
    ctx: click.Context,
    code: str | None,
    line_length: int,
    target_version: list[TargetVersion],
    check: bool,
    diff: bool,
    line_ranges: Sequence[str],
    color: bool,
    fast: bool,
    pyi: bool,
    ipynb: bool,
    python_cell_magics: Sequence[str],
    skip_source_first_line: bool,
    skip_string_normalization: bool,
    skip_magic_trailing_comma: bool,
    preview: bool,
    unstable: bool,
    enable_unstable_feature: list[Preview],
    quiet: bool,
    verbose: bool,
    required_version: str | None,
    include: Pattern[str],
    exclude: Pattern[str] | None,
    extend_exclude: Pattern[str] | None,
    force_exclude: Pattern[str] | None,
    stdin_filename: str | None,
    workers: int | None,
    src: tuple[str, ...],
    config: str | None,
    no_cache: bool,
) -> None:
    """The uncompromising code formatter.

    Click entry point: validates option combinations, resolves the project
    root and configuration, builds the formatting ``Mode``, gathers the
    sources, and dispatches to ``reformat_code`` / ``reformat_one`` /
    ``reformat_many``. Always exits via ``ctx.exit``.
    """
    ctx.ensure_object(dict)

    # Hard floor on the interpreter, plus a guard against CPython 3.12.5,
    # whose memory-safety bug can make the AST safety check fail spuriously.
    assert sys.version_info >= (3, 10), "Black requires Python 3.10+"
    if sys.version_info[:3] == (3, 12, 5):
        out(
            "Python 3.12.5 has a memory safety issue that can cause Black's "
            "AST safety checks to fail. "
            "Please upgrade to Python 3.12.6 or downgrade to Python 3.12.4"
        )
        ctx.exit(1)

    # Exactly one input source is allowed: SRC paths or -c/--code.
    if src and code is not None:
        out(
            main.get_usage(ctx)
            + "\n\n'SRC' and 'code' cannot be passed simultaneously."
        )
        ctx.exit(1)
    if not src and code is None:
        out(main.get_usage(ctx) + "\n\nOne of 'SRC' or 'code' is required.")
        ctx.exit(1)

    # It doesn't do anything if --unstable is also passed, so just allow it.
    if enable_unstable_feature and not (preview or unstable):
        out(
            main.get_usage(ctx)
            + "\n\n'--enable-unstable-feature' requires '--preview'."
        )
        ctx.exit(1)

    # With --code there is no file-system context, hence no project root.
    root, method = (
        find_project_root(src, stdin_filename) if code is None else (None, None)
    )
    ctx.obj["root"] = root

    if verbose:
        if root:
            out(
                f"Identified `{root}` as project root containing a {method}.",
                fg="blue",
            )

        if config:
            # Report where the effective configuration came from.
            config_source = ctx.get_parameter_source("config")
            user_level_config = str(find_user_pyproject_toml())
            if config == user_level_config:
                out(
                    "Using configuration from user-level config at "
                    f"'{user_level_config}'.",
                    fg="blue",
                )
            elif config_source in (
                ParameterSource.DEFAULT,
                ParameterSource.DEFAULT_MAP,
            ):
                out("Using configuration from project root.", fg="blue")
            else:
                out(f"Using configuration in '{config}'.", fg="blue")
            if ctx.default_map:
                for param, value in ctx.default_map.items():
                    out(f"{param}: {value}")

    error_msg = "Oh no! 💥 💔 💥"
    # --required-version accepts either a full version or just the major one.
    if (
        required_version
        and required_version != __version__
        and required_version != __version__.split(".")[0]
    ):
        err(
            f"{error_msg} The required version `{required_version}` does not match"
            f" the running version `{__version__}`!"
        )
        ctx.exit(1)
    if ipynb and pyi:
        err("Cannot pass both `pyi` and `ipynb` flags!")
        ctx.exit(1)

    write_back = WriteBack.from_configuration(check=check, diff=diff, color=color)
    if target_version:
        versions = set(target_version)
    else:
        # We'll autodetect later.
        versions = set()
    mode = Mode(
        target_versions=versions,
        line_length=line_length,
        is_pyi=pyi,
        is_ipynb=ipynb,
        skip_source_first_line=skip_source_first_line,
        string_normalization=not skip_string_normalization,
        magic_trailing_comma=not skip_magic_trailing_comma,
        preview=preview,
        unstable=unstable,
        python_cell_magics=set(python_cell_magics),
        enabled_features=set(enable_unstable_feature),
    )

    # Parse and validate --line-ranges before doing any work.
    lines: list[tuple[int, int]] = []
    if line_ranges:
        if ipynb:
            err("Cannot use --line-ranges with ipynb files.")
            ctx.exit(1)

        try:
            lines = parse_line_ranges(line_ranges)
        except ValueError as e:
            err(str(e))
            ctx.exit(1)

    if code is not None:
        # Run in quiet mode by default with -c; the extra output isn't useful.
        # You can still pass -v to get verbose output.
        quiet = True

    report = Report(check=check, diff=diff, quiet=quiet, verbose=verbose)

    if code is not None:
        reformat_code(
            content=code,
            fast=fast,
            write_back=write_back,
            mode=mode,
            report=report,
            lines=lines,
        )
    else:
        assert root is not None  # root is only None if code is not None
        try:
            sources = get_sources(
                root=root,
                src=src,
                quiet=quiet,
                verbose=verbose,
                include=include,
                exclude=exclude,
                extend_exclude=extend_exclude,
                force_exclude=force_exclude,
                report=report,
                stdin_filename=stdin_filename,
            )
        except GitWildMatchPatternError:
            ctx.exit(1)

        if not sources:
            if verbose or not quiet:
                out("No Python files are present to be formatted. Nothing to do 😴")
            # When stdin was requested, still pass the input through unchanged.
            if "-" in src:
                sys.stdout.write(sys.stdin.read())
            ctx.exit(0)

        if len(sources) == 1:
            reformat_one(
                src=sources.pop(),
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                lines=lines,
                no_cache=no_cache,
            )
        else:
            # Imported lazily: the process pool machinery is only needed here.
            from black.concurrency import reformat_many

            if lines:
                err("Cannot use --line-ranges to format multiple files.")
                ctx.exit(1)
            reformat_many(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                workers=workers,
                no_cache=no_cache,
            )

    if verbose or not quiet:
        if code is None and (verbose or report.change_count or report.failure_count):
            out()
        out(error_msg if report.return_code else "All done! ✨ 🍰 ✨")
        if code is None:
            click.echo(str(report), err=True)
    ctx.exit(report.return_code)
+
+
def get_sources(
    *,
    root: Path,
    src: tuple[str, ...],
    quiet: bool,
    verbose: bool,
    include: Pattern[str],
    exclude: Pattern[str] | None,
    extend_exclude: Pattern[str] | None,
    force_exclude: Pattern[str] | None,
    report: "Report",
    stdin_filename: str | None,
) -> set[Path]:
    """Compute the set of files to be formatted.

    Entries in `src` may be files, directories (recursed via
    `gen_python_files`), or "-" for stdin. `exclude=None` means the default
    exclusions apply, which also turns on .gitignore handling.
    """
    sources: set[Path] = set()

    assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
    using_default_exclude = exclude is None
    exclude = re_compile_maybe_verbose(DEFAULT_EXCLUDES) if exclude is None else exclude
    gitignore: dict[Path, PathSpec] | None = None
    root_gitignore = get_gitignore(root)

    for s in src:
        if s == "-" and stdin_filename:
            path = Path(stdin_filename)
            # --force-exclude applies to the pretend stdin filename too.
            if path_is_excluded(stdin_filename, force_exclude):
                report.path_ignored(
                    path,
                    "--stdin-filename matches the --force-exclude regular expression",
                )
                continue
            is_stdin = True
        else:
            path = Path(s)
            is_stdin = False

        # Compare the logic here to the logic in `gen_python_files`.
        if is_stdin or path.is_file():
            if resolves_outside_root_or_cannot_stat(path, root, report):
                if verbose:
                    out(f'Skipping invalid source: "{path}"', fg="red")
                continue

            # Match --force-exclude against a root-relative, /-prefixed path.
            root_relative_path = best_effort_relative_path(path, root).as_posix()
            root_relative_path = "/" + root_relative_path

            # Hard-exclude any files that matches the `--force-exclude` regex.
            if path_is_excluded(root_relative_path, force_exclude):
                report.path_ignored(
                    path, "matches the --force-exclude regular expression"
                )
                continue

            # Mark stdin sources so downstream code can tell them apart.
            if is_stdin:
                path = Path(f"{STDIN_PLACEHOLDER}{path}")

            if path.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
                warn=verbose or not quiet
            ):
                continue

            if verbose:
                out(f'Found input source: "{path}"', fg="blue")
            sources.add(path)
        elif path.is_dir():
            path = root / (path.resolve().relative_to(root))
            if verbose:
                out(f'Found input source directory: "{path}"', fg="blue")

            # Default exclusions imply .gitignore support for root and path.
            if using_default_exclude:
                gitignore = {
                    root: root_gitignore,
                    path: get_gitignore(path),
                }
            sources.update(
                gen_python_files(
                    path.iterdir(),
                    root,
                    include,
                    exclude,
                    extend_exclude,
                    force_exclude,
                    report,
                    gitignore,
                    verbose=verbose,
                    quiet=quiet,
                )
            )
        elif s == "-":
            if verbose:
                out("Found input source stdin", fg="blue")
            sources.add(path)
        else:
            err(f"invalid path: {s}")

    return sources
+
+
def reformat_code(
    content: str,
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: Report,
    *,
    lines: Collection[tuple[int, int]] = (),
) -> None:
    """Reformat `content` in-process and print the result.

    String-content counterpart of `reformat_one`: no child processes are
    spawned. `fast`, `write_back`, and `mode` are forwarded to
    :func:`format_stdin_to_stdout`; success or failure is recorded on
    `report` under an empty path.
    """
    # There is no real file behind `content`; report under an empty path.
    virtual_path = Path("")
    try:
        was_reformatted = format_stdin_to_stdout(
            content=content,
            fast=fast,
            write_back=write_back,
            mode=mode,
            lines=lines,
        )
        report.done(virtual_path, Changed.YES if was_reformatted else Changed.NO)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(virtual_path, str(exc))
+
+
# diff-shades depends on being able to monkeypatch this function to operate. I know
# it's not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_one(
    src: Path,
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: "Report",
    *,
    lines: Collection[tuple[int, int]] = (),
    no_cache: bool = False,
) -> None:
    """Reformat a single file under `src` without spawning child processes.

    `src` may also be "-" or a STDIN_PLACEHOLDER-prefixed path, in which
    case stdin is formatted instead of a file on disk.
    `fast`, `write_back`, and `mode` options are passed to
    :func:`format_file_in_place` or :func:`format_stdin_to_stdout`.
    """
    try:
        changed = Changed.NO

        if str(src) == "-":
            is_stdin = True
        elif str(src).startswith(STDIN_PLACEHOLDER):
            is_stdin = True
            # Use the original name again in case we want to print something
            # to the user
            src = Path(str(src)[len(STDIN_PLACEHOLDER) :])
        else:
            is_stdin = False

        if is_stdin:
            # Infer stub/notebook mode from the (pretend) file extension.
            if src.suffix == ".pyi":
                mode = replace(mode, is_pyi=True)
            elif src.suffix == ".ipynb":
                mode = replace(mode, is_ipynb=True)
            if format_stdin_to_stdout(
                fast=fast, write_back=write_back, mode=mode, lines=lines
            ):
                changed = Changed.YES
        else:
            cache = None if no_cache else Cache.read(mode)
            # The cache is not consulted for diff output, which never writes.
            if cache is not None and write_back not in (
                WriteBack.DIFF,
                WriteBack.COLOR_DIFF,
            ):
                if not cache.is_changed(src):
                    changed = Changed.CACHED
            if changed is not Changed.CACHED and format_file_in_place(
                src, fast=fast, write_back=write_back, mode=mode, lines=lines
            ):
                changed = Changed.YES
            # Record files now known to be correctly formatted so future runs
            # can skip them.
            if cache is not None and (
                (write_back is WriteBack.YES and changed is not Changed.CACHED)
                or (write_back is WriteBack.CHECK and changed is Changed.NO)
            ):
                cache.write([src])
        report.done(src, changed)
    except Exception as exc:
        if report.verbose:
            traceback.print_exc()
        report.failed(src, str(exc))
+
+
def format_file_in_place(
    src: Path,
    fast: bool,
    mode: Mode,
    write_back: WriteBack = WriteBack.NO,
    lock: Any = None,  # multiprocessing.Manager().Lock() is some crazy proxy
    *,
    lines: Collection[tuple[int, int]] = (),
) -> bool:
    """Format file under `src` path. Return True if changed.

    If `write_back` is DIFF, write a diff to stdout. If it is YES, write reformatted
    code to the file.
    `mode` and `fast` options are passed to :func:`format_file_contents`.
    Raises ValueError when an `.ipynb` file cannot be parsed as JSON.
    """
    # The file extension wins over whatever mode the caller passed in.
    if src.suffix == ".pyi":
        mode = replace(mode, is_pyi=True)
    elif src.suffix == ".ipynb":
        mode = replace(mode, is_ipynb=True)

    then = datetime.fromtimestamp(src.stat().st_mtime, timezone.utc)
    header = b""
    with open(src, "rb") as buf:
        if mode.skip_source_first_line:
            # Keep the first line verbatim; it is re-attached below.
            header = buf.readline()
        src_contents, encoding, newline = decode_bytes(buf.read(), mode)
    try:
        dst_contents = format_file_contents(
            src_contents, fast=fast, mode=mode, lines=lines
        )
    except NothingChanged:
        return False
    except JSONDecodeError:
        raise ValueError(
            f"File '{src}' cannot be parsed as valid Jupyter notebook."
        ) from None
    src_contents = header.decode(encoding) + src_contents
    dst_contents = header.decode(encoding) + dst_contents

    if write_back == WriteBack.YES:
        with open(src, "w", encoding=encoding, newline=newline) as f:
            f.write(dst_contents)
    elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        now = datetime.now(timezone.utc)
        src_name = f"{src}\t{then}"
        dst_name = f"{src}\t{now}"
        if mode.is_ipynb:
            diff_contents = ipynb_diff(src_contents, dst_contents, src_name, dst_name)
        else:
            diff_contents = diff(src_contents, dst_contents, src_name, dst_name)

        if write_back == WriteBack.COLOR_DIFF:
            diff_contents = color_diff(diff_contents)

        # Serialize diff output when several workers write concurrently.
        with lock or nullcontext():
            f = io.TextIOWrapper(
                sys.stdout.buffer,
                encoding=encoding,
                newline=newline,
                write_through=True,
            )
            f = wrap_stream_for_windows(f)
            f.write(diff_contents)
            f.detach()

    return True
+
+
def format_stdin_to_stdout(
    fast: bool,
    *,
    content: str | None = None,
    write_back: WriteBack = WriteBack.NO,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> bool:
    """Format file on stdin. Return True if changed.

    If content is None, it's read from sys.stdin.

    If `write_back` is YES, write reformatted code back to stdout. If it is DIFF,
    write a diff to stdout. The `mode` argument is passed to
    :func:`format_file_contents`.
    """
    then = datetime.now(timezone.utc)

    if content is None:
        src, encoding, newline = decode_bytes(sys.stdin.buffer.read(), mode)
    elif Preview.normalize_cr_newlines in mode:
        src, encoding, newline = content, "utf-8", "\n"
    else:
        # An empty newline means "keep whatever line endings the input used".
        src, encoding, newline = content, "utf-8", ""

    dst = src
    try:
        dst = format_file_contents(src, fast=fast, mode=mode, lines=lines)
        return True

    except NothingChanged:
        return False

    finally:
        # Output happens in `finally` so both the changed and the unchanged
        # cases emit the source (or its diff) exactly once.
        f = io.TextIOWrapper(
            sys.stdout.buffer, encoding=encoding, newline=newline, write_through=True
        )
        if write_back == WriteBack.YES:
            # Make sure there's a newline after the content
            if Preview.normalize_cr_newlines in mode:
                if dst and dst[-1] != "\n" and dst[-1] != "\r":
                    dst += newline
            else:
                if dst and dst[-1] != "\n":
                    dst += "\n"
            f.write(dst)
        elif write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
            now = datetime.now(timezone.utc)
            src_name = f"STDIN\t{then}"
            dst_name = f"STDOUT\t{now}"
            d = diff(src, dst, src_name, dst_name)
            if write_back == WriteBack.COLOR_DIFF:
                d = color_diff(d)
                f = wrap_stream_for_windows(f)
            f.write(d)
        f.detach()
+
+
def check_stability_and_equivalence(
    src_contents: str,
    dst_contents: str,
    *,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> None:
    """Run Black's two safety checks on a formatting result.

    Raises AssertionError when `dst_contents` is not AST-equivalent to
    `src_contents`, or when formatting `dst_contents` a second time would
    change it again (i.e. the output is not stable).
    """
    # Equivalence: the reformatted code must parse to the same AST.
    assert_equivalent(src_contents, dst_contents)
    # Stability: a second formatting pass must be a no-op.
    assert_stable(src_contents, dst_contents, mode=mode, lines=lines)
+
+
def format_file_contents(
    src_contents: str,
    *,
    fast: bool,
    mode: Mode,
    lines: Collection[tuple[int, int]] = (),
) -> FileContent:
    """Reformat contents of a file and return new contents.

    Raises NothingChanged when formatting makes no difference. Unless `fast`
    is true, the result is verified with :func:`assert_equivalent` and
    :func:`assert_stable`. `mode` is passed to :func:`format_str`.
    """
    if mode.is_ipynb:
        dst_contents = format_ipynb_string(src_contents, fast=fast, mode=mode)
    else:
        dst_contents = format_str(src_contents, mode=mode, lines=lines)

    if dst_contents == src_contents:
        raise NothingChanged

    # Notebook cells were already safety-checked inside format_ipynb_string.
    if not (fast or mode.is_ipynb):
        check_stability_and_equivalence(
            src_contents, dst_contents, mode=mode, lines=lines
        )
    return dst_contents
+
+
def format_cell(src: str, *, fast: bool, mode: Mode) -> str:
    """Format the source of a single Jupyter code cell.

    Pipeline:

    1. validate the cell;
    2. strip a trailing semicolon, remembering whether one was present;
    3. mask IPython magics so the cell parses as plain Python;
    4. format with :func:`format_str` (safety-checked unless `fast`);
    5. unmask the magics and restore the trailing semicolon;
    6. strip trailing newlines.

    Cells with syntax errors are left untouched (NothingChanged), as they
    could potentially be automagics or multi-line magics, which are
    currently not supported.
    """
    validate_cell(src, mode)
    stripped, had_semicolon = remove_trailing_semicolon(src)
    try:
        masked, replacements = mask_cell(stripped)
    except SyntaxError:
        # Unparseable cell: leave it alone rather than risk corrupting it.
        raise NothingChanged from None
    formatted = format_str(masked, mode=mode)
    if not fast:
        check_stability_and_equivalence(masked, formatted, mode=mode)
    unmasked = unmask_cell(formatted, replacements)
    dst = put_trailing_semicolon_back(unmasked, had_semicolon).rstrip("\n")
    if dst == src:
        raise NothingChanged from None
    return dst
+
+
def validate_metadata(nb: MutableMapping[str, Any]) -> None:
    """Raise NothingChanged when the notebook is marked as non-Python.

    All notebook metadata fields are optional, see
    https://nbformat.readthedocs.io/en/latest/format_description.html. So
    a notebook with empty metadata is still parsed (and formatted).
    """
    language = nb.get("metadata", {}).get("language_info", {}).get("name", None)
    # Only an explicitly declared non-"python" language blocks formatting.
    if language is None or language == "python":
        return
    raise NothingChanged from None
+
+
def format_ipynb_string(src_contents: str, *, fast: bool, mode: Mode) -> FileContent:
    """Format Jupyter notebook.

    Operates cell-by-cell, only on code cells, only for Python notebooks.
    A trailing newline in the original ``.ipynb`` is preserved. Raises
    NothingChanged when the input is empty, non-Python, or already formatted.
    """
    if not src_contents:
        raise NothingChanged

    had_trailing_newline = src_contents.endswith("\n")
    nb = json.loads(src_contents)
    validate_metadata(nb)

    modified = False
    for cell in nb["cells"]:
        if cell.get("cell_type", None) != "code":
            continue
        try:
            dst = format_cell("".join(cell["source"]), fast=fast, mode=mode)
        except NothingChanged:
            continue
        cell["source"] = dst.splitlines(keepends=True)
        modified = True

    if not modified:
        raise NothingChanged
    dst_contents = json.dumps(nb, indent=1, ensure_ascii=False)
    if had_trailing_newline:
        dst_contents += "\n"
    return dst_contents
+
+
def format_str(
    src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
) -> str:
    """Reformat a string and return new contents.

    `mode` determines formatting options, such as how many characters per line are
    allowed. Example:

    >>> import black
    >>> print(black.format_str("def f(arg:str='')->None:...", mode=black.Mode()))
    def f(arg: str = "") -> None:
        ...

    A more complex example:

    >>> print(
    ...   black.format_str(
    ...     "def f(arg:str='')->None: hey",
    ...     mode=black.Mode(
    ...       target_versions={black.TargetVersion.PY36},
    ...       line_length=10,
    ...       string_normalization=False,
    ...       is_pyi=False,
    ...     ),
    ...   ),
    ... )
    def f(
        arg: str = '',
    ) -> None:
        hey

    """
    if lines:
        # Drop ranges outside the source; nothing left means nothing to do.
        lines = sanitized_lines(lines, src_contents)
        if not lines:
            return src_contents  # Nothing to format
    dst_contents = _format_str_once(src_contents, mode=mode, lines=lines)
    # Forced second pass to work around optional trailing commas (becoming
    # forced trailing commas on pass 2) interacting differently with optional
    # parentheses. Admittedly ugly.
    if src_contents != dst_contents:
        if lines:
            # Line numbers may have shifted during the first pass.
            lines = adjusted_lines(lines, src_contents, dst_contents)
        return _format_str_once(dst_contents, mode=mode, lines=lines)
    return dst_contents
+
+
def _format_str_once(
    src_contents: str, *, mode: Mode, lines: Collection[tuple[int, int]] = ()
) -> str:
    """Run a single formatting pass over `src_contents` and return the result.

    Helper for :func:`format_str`, which may call this twice. `lines`
    restricts formatting to the given line ranges.
    """
    if Preview.normalize_cr_newlines in mode:
        # Normalize newlines before parsing, remembering the original style
        # so it can be reinstated on the way out.
        normalized_contents, _, newline_type = decode_bytes(
            src_contents.encode("utf-8"), mode
        )

        src_node = lib2to3_parse(
            normalized_contents.lstrip(), target_versions=mode.target_versions
        )
    else:
        src_node = lib2to3_parse(src_contents.lstrip(), mode.target_versions)

    dst_blocks: list[LinesBlock] = []
    if mode.target_versions:
        versions = mode.target_versions
    else:
        # No explicit targets: infer them from the syntax the file uses.
        future_imports = get_future_imports(src_node)
        versions = detect_target_versions(src_node, future_imports=future_imports)

    line_generation_features = {
        feature
        for feature in {
            Feature.PARENTHESIZED_CONTEXT_MANAGERS,
            Feature.UNPARENTHESIZED_EXCEPT_TYPES,
            Feature.T_STRINGS,
        }
        if supports_feature(versions, feature)
    }
    normalize_fmt_off(src_node, mode, lines)
    if lines:
        # This should be called after normalize_fmt_off.
        convert_unchanged_lines(src_node, lines)

    line_generator = LineGenerator(mode=mode, features=line_generation_features)
    elt = EmptyLineTracker(mode=mode)
    split_line_features = {
        feature
        for feature in {
            Feature.TRAILING_COMMA_IN_CALL,
            Feature.TRAILING_COMMA_IN_DEF,
        }
        if supports_feature(versions, feature)
    }
    block: LinesBlock | None = None
    for current_line in line_generator.visit(src_node):
        block = elt.maybe_empty_lines(current_line)
        dst_blocks.append(block)
        for line in transform_line(
            current_line, mode=mode, features=split_line_features
        ):
            block.content_lines.append(str(line))
    if dst_blocks:
        # Never emit empty lines after the final block.
        dst_blocks[-1].after = 0
    dst_contents = []
    for block in dst_blocks:
        dst_contents.extend(block.all_lines())
    if not dst_contents:
        # No output lines: preserve a single newline when the input had one.
        if Preview.normalize_cr_newlines in mode:
            if "\n" in normalized_contents:
                return newline_type
        else:
            # Use decode_bytes to retrieve the correct source newline (CRLF or LF),
            # and check if normalized_content has more than one line
            normalized_content, _, newline = decode_bytes(
                src_contents.encode("utf-8"), mode
            )
            if "\n" in normalized_content:
                return newline
        return ""
    if Preview.normalize_cr_newlines in mode:
        return "".join(dst_contents).replace("\n", newline_type)
    else:
        return "".join(dst_contents)
+
+
def decode_bytes(src: bytes, mode: Mode) -> tuple[FileContent, Encoding, NewLine]:
    """Return a tuple of (decoded_contents, encoding, newline).

    `newline` is either CRLF or LF (or, with Preview.normalize_cr_newlines,
    possibly a lone CR) as detected from the first line, but
    `decoded_contents` is decoded with universal newlines (i.e. only
    contains LF).
    """
    srcbuf = io.BytesIO(src)
    encoding, lines = tokenize.detect_encoding(srcbuf.readline)
    if not lines:
        # Empty input: default to LF.
        return "", encoding, "\n"

    first = lines[0]
    if Preview.normalize_cr_newlines in mode:
        # Split the first line into body + terminator; a stray "\r" in the
        # body signals classic-Mac CR line endings.
        if first.endswith(b"\r\n"):
            body, terminator = first[:-2], "\r\n"
        elif first.endswith(b"\n"):
            body, terminator = first[:-1], "\n"
        else:
            body, terminator = first, "\n"
        newline = "\r" if b"\r" in body else terminator
    else:
        newline = "\r\n" if first.endswith(b"\r\n") else "\n"

    srcbuf.seek(0)
    with io.TextIOWrapper(srcbuf, encoding) as tiow:
        return tiow.read(), encoding, newline
+
+
def get_features_used(
    node: Node, *, future_imports: set[str] | None = None
) -> set[Feature]:
    """Return a set of (relatively) new Python features used in this file.

    Currently looking for:
    - f-strings;
    - self-documenting expressions in f-strings (f"{x=}");
    - underscores in numeric literals;
    - trailing commas after * or ** in function signatures and calls;
    - positional only arguments in function signatures and lambdas;
    - assignment expression;
    - relaxed decorator syntax;
    - usage of __future__ flags (annotations);
    - print / exec statements;
    - parenthesized context managers;
    - match statements;
    - except* clause;
    - variadic generics;
    - template strings;
    - type parameter syntax and type parameter defaults;
    - unparenthesized tuples of exception types.
    """
    features: set[Feature] = set()
    # __future__ imports can force features regardless of the syntax below.
    if future_imports:
        features |= {
            FUTURE_FLAG_TO_FEATURE[future_import]
            for future_import in future_imports
            if future_import in FUTURE_FLAG_TO_FEATURE
        }

    for n in node.pre_order():
        if n.type == token.FSTRING_START:
            features.add(Feature.F_STRINGS)
        elif n.type == token.TSTRING_START:
            features.add(Feature.T_STRINGS)
        elif (
            n.type == token.RBRACE
            and n.parent is not None
            and any(child.type == token.EQUAL for child in n.parent.children)
        ):
            # f"{x=}" self-documenting expressions.
            features.add(Feature.DEBUG_F_STRINGS)

        elif is_number_token(n):
            if "_" in n.value:
                features.add(Feature.NUMERIC_UNDERSCORES)

        elif n.type == token.SLASH:
            # A "/" inside an argument list marks positional-only parameters.
            if n.parent and n.parent.type in {
                syms.typedargslist,
                syms.arglist,
                syms.varargslist,
            }:
                features.add(Feature.POS_ONLY_ARGUMENTS)

        elif n.type == token.COLONEQUAL:
            features.add(Feature.ASSIGNMENT_EXPRESSIONS)

        elif n.type == syms.decorator:
            if len(n.children) > 1 and not is_simple_decorator_expression(
                n.children[1]
            ):
                features.add(Feature.RELAXED_DECORATORS)

        elif (
            n.type in {syms.typedargslist, syms.arglist}
            and n.children
            and n.children[-1].type == token.COMMA
        ):
            if n.type == syms.typedargslist:
                feature = Feature.TRAILING_COMMA_IN_DEF
            else:
                feature = Feature.TRAILING_COMMA_IN_CALL

            # Only a trailing comma after * / ** arguments is version-gated.
            for ch in n.children:
                if ch.type in STARS:
                    features.add(feature)

                if ch.type == syms.argument:
                    for argch in ch.children:
                        if argch.type in STARS:
                            features.add(feature)

        elif (
            n.type in {syms.return_stmt, syms.yield_expr}
            and len(n.children) >= 2
            and n.children[1].type == syms.testlist_star_expr
            and any(child.type == syms.star_expr for child in n.children[1].children)
        ):
            features.add(Feature.UNPACKING_ON_FLOW)

        elif (
            n.type == syms.annassign
            and len(n.children) >= 4
            and n.children[3].type == syms.testlist_star_expr
        ):
            features.add(Feature.ANN_ASSIGN_EXTENDED_RHS)

        elif (
            n.type == syms.with_stmt
            and len(n.children) > 2
            and n.children[1].type == syms.atom
        ):
            # with (...): — only counts when the parens wrap an as-pattern.
            atom_children = n.children[1].children
            if (
                len(atom_children) == 3
                and atom_children[0].type == token.LPAR
                and _contains_asexpr(atom_children[1])
                and atom_children[2].type == token.RPAR
            ):
                features.add(Feature.PARENTHESIZED_CONTEXT_MANAGERS)

        elif n.type == syms.match_stmt:
            features.add(Feature.PATTERN_MATCHING)

        elif n.type in {syms.subscriptlist, syms.trailer} and any(
            child.type == syms.star_expr for child in n.children
        ):
            features.add(Feature.VARIADIC_GENERICS)

        elif (
            n.type == syms.tname_star
            and len(n.children) == 3
            and n.children[2].type == syms.star_expr
        ):
            features.add(Feature.VARIADIC_GENERICS)

        elif n.type in (syms.type_stmt, syms.typeparams):
            features.add(Feature.TYPE_PARAMS)

        elif (
            n.type in (syms.typevartuple, syms.paramspec, syms.typevar)
            and n.children[-2].type == token.EQUAL
        ):
            features.add(Feature.TYPE_PARAM_DEFAULTS)

        elif (
            n.type == syms.except_clause
            and len(n.children) >= 2
            and (
                n.children[1].type == token.STAR or n.children[1].type == syms.testlist
            )
        ):
            is_star_except = n.children[1].type == token.STAR

            if is_star_except:
                features.add(Feature.EXCEPT_STAR)

            # Presence of except* pushes as clause 1 index back
            has_as_clause = (
                len(n.children) >= is_star_except + 3
                and n.children[is_star_except + 2].type == token.NAME
                and n.children[is_star_except + 2].value == "as"  # type: ignore
            )

            # If there's no 'as' clause and the except expression is a testlist.
            if not has_as_clause and (
                (is_star_except and n.children[2].type == syms.testlist)
                or (not is_star_except and n.children[1].type == syms.testlist)
            ):
                features.add(Feature.UNPARENTHESIZED_EXCEPT_TYPES)

    return features
+
+
def _contains_asexpr(node: Node | Leaf) -> bool:
    """Return True if `node` contains an as-pattern."""
    if node.type == syms.asexpr_test:
        return True
    if node.type == syms.testlist_gexp:
        # Any element of the tuple/genexp may hold the as-pattern.
        return any(_contains_asexpr(child) for child in node.children)
    if node.type == syms.atom:
        children = node.children
        is_parenthesized = (
            len(children) == 3
            and children[0].type == token.LPAR
            and children[2].type == token.RPAR
        )
        if is_parenthesized:
            # Look through one level of parentheses.
            return _contains_asexpr(children[1])
    return False
+
+
+def detect_target_versions(
+ node: Node, *, future_imports: set[str] | None = None
+) -> set[TargetVersion]:
+ """Detect the version to target based on the nodes used."""
+ features = get_features_used(node, future_imports=future_imports)
+ return {
+ version for version in TargetVersion if features <= VERSION_TO_FEATURES[version]
+ }
+
+
+def get_future_imports(node: Node) -> set[str]:
+    """Return a set of __future__ imports in the file.
+
+    Only the leading run of simple statements is scanned, since __future__
+    imports must appear before any other code; a module docstring is allowed
+    to precede them. Scanning stops at the first statement that is neither a
+    docstring nor a `from __future__ import ...`.
+    """
+    imports: set[str] = set()
+
+    def get_imports_from_children(children: list[LN]) -> Generator[str, None, None]:
+        # Yield the imported names from the child nodes of an import_from node.
+        # For aliased imports (`import x as y`) the ORIGINAL name is yielded,
+        # since that is what identifies the __future__ feature.
+        for child in children:
+            if isinstance(child, Leaf):
+                if child.type == token.NAME:
+                    yield child.value
+
+            elif child.type == syms.import_as_name:
+                orig_name = child.children[0]
+                assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports"
+                assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
+                yield orig_name.value
+
+            elif child.type == syms.import_as_names:
+                # Comma-separated list: recurse into each import_as_name/NAME.
+                yield from get_imports_from_children(child.children)
+
+            else:
+                raise AssertionError("Invalid syntax parsing imports")
+
+    for child in node.children:
+        if child.type != syms.simple_stmt:
+            break
+
+        first_child = child.children[0]
+        if isinstance(first_child, Leaf):
+            # Continue looking if we see a docstring; otherwise stop.
+            if (
+                len(child.children) == 2
+                and first_child.type == token.STRING
+                and child.children[1].type == token.NEWLINE
+            ):
+                continue
+
+            break
+
+        elif first_child.type == syms.import_from:
+            module_name = first_child.children[1]
+            if not isinstance(module_name, Leaf) or module_name.value != "__future__":
+                break
+
+            # children[3:] skips the `from`, `__future__`, and `import` tokens.
+            imports |= set(get_imports_from_children(first_child.children[3:]))
+        else:
+            break
+
+    return imports
+
+
+def _black_info() -> str:
+ return (
+ f"Black {__version__} on "
+ f"Python ({platform.python_implementation()}) {platform.python_version()}"
+ )
+
+
+def assert_equivalent(src: str, dst: str) -> None:
+ """Raise AssertionError if `src` and `dst` aren't equivalent."""
+ try:
+ src_ast = parse_ast(src)
+ except Exception as exc:
+ raise ASTSafetyError(
+ "cannot use --safe with this file; failed to parse source file AST: "
+ f"{exc}\n"
+ "This could be caused by running Black with an older Python version "
+ "that does not support new syntax used in your source file."
+ ) from exc
+
+ try:
+ dst_ast = parse_ast(dst)
+ except Exception as exc:
+ log = dump_to_file("".join(traceback.format_tb(exc.__traceback__)), dst)
+ raise ASTSafetyError(
+ f"INTERNAL ERROR: {_black_info()} produced invalid code: {exc}. "
+ "Please report a bug on https://github.com/psf/black/issues. "
+ f"This invalid output might be helpful: {log}"
+ ) from None
+
+ src_ast_str = "\n".join(stringify_ast(src_ast))
+ dst_ast_str = "\n".join(stringify_ast(dst_ast))
+ if src_ast_str != dst_ast_str:
+ log = dump_to_file(diff(src_ast_str, dst_ast_str, "src", "dst"))
+ raise ASTSafetyError(
+ f"INTERNAL ERROR: {_black_info()} produced code that is not equivalent to"
+ " the source. Please report a bug on https://github.com/psf/black/issues."
+ f" This diff might be helpful: {log}"
+ ) from None
+
+
+def assert_stable(
+ src: str, dst: str, mode: Mode, *, lines: Collection[tuple[int, int]] = ()
+) -> None:
+ """Raise AssertionError if `dst` reformats differently the second time."""
+ if lines:
+ # Formatting specified lines requires `adjusted_lines` to map original lines
+ # to the formatted lines before re-formatting the previously formatted result.
+ # Due to less-ideal diff algorithm, some edge cases produce incorrect new line
+ # ranges. Hence for now, we skip the stable check.
+ # See https://github.com/psf/black/issues/4033 for context.
+ return
+ # We shouldn't call format_str() here, because that formats the string
+ # twice and may hide a bug where we bounce back and forth between two
+ # versions.
+ newdst = _format_str_once(dst, mode=mode, lines=lines)
+ if dst != newdst:
+ log = dump_to_file(
+ str(mode),
+ diff(src, dst, "source", "first pass"),
+ diff(dst, newdst, "first pass", "second pass"),
+ )
+ raise AssertionError(
+ f"INTERNAL ERROR: {_black_info()} produced different code on the second"
+ " pass of the formatter. Please report a bug on"
+ f" https://github.com/psf/black/issues. This diff might be helpful: {log}"
+ ) from None
+
+
+def patched_main() -> None:
+ # PyInstaller patches multiprocessing to need freeze_support() even in non-Windows
+ # environments so just assume we always need to call it if frozen.
+ if getattr(sys, "frozen", False):
+ from multiprocessing import freeze_support
+
+ freeze_support()
+
+ main()
+
+
+# Script entry point when this module is executed directly.
+if __name__ == "__main__":
+    patched_main()
diff --git a/py311/lib/python3.11/site-packages/black/__main__.py b/py311/lib/python3.11/site-packages/black/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..19b810b530acb20f7cf790f2b902f717ef1d8c03
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/__main__.py
@@ -0,0 +1,3 @@
+# `python -m black` delegates to the package's main CLI entry point.
+from black import patched_main
+
+patched_main()
diff --git a/py311/lib/python3.11/site-packages/black/_width_table.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/_width_table.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..f784cf097b67567345b38ea4fd79d329548e4312
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/_width_table.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/_width_table.py b/py311/lib/python3.11/site-packages/black/_width_table.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f6ff9febc33d69b27746b5574899116c28c82da
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/_width_table.py
@@ -0,0 +1,478 @@
+# Generated by make_width_table.py
+# wcwidth 0.2.6
+# Unicode 15.0.0
+from typing import Final
+
+WIDTH_TABLE: Final[list[tuple[int, int, int]]] = [
+ (0, 0, 0),
+ (1, 31, -1),
+ (127, 159, -1),
+ (768, 879, 0),
+ (1155, 1161, 0),
+ (1425, 1469, 0),
+ (1471, 1471, 0),
+ (1473, 1474, 0),
+ (1476, 1477, 0),
+ (1479, 1479, 0),
+ (1552, 1562, 0),
+ (1611, 1631, 0),
+ (1648, 1648, 0),
+ (1750, 1756, 0),
+ (1759, 1764, 0),
+ (1767, 1768, 0),
+ (1770, 1773, 0),
+ (1809, 1809, 0),
+ (1840, 1866, 0),
+ (1958, 1968, 0),
+ (2027, 2035, 0),
+ (2045, 2045, 0),
+ (2070, 2073, 0),
+ (2075, 2083, 0),
+ (2085, 2087, 0),
+ (2089, 2093, 0),
+ (2137, 2139, 0),
+ (2200, 2207, 0),
+ (2250, 2273, 0),
+ (2275, 2306, 0),
+ (2362, 2362, 0),
+ (2364, 2364, 0),
+ (2369, 2376, 0),
+ (2381, 2381, 0),
+ (2385, 2391, 0),
+ (2402, 2403, 0),
+ (2433, 2433, 0),
+ (2492, 2492, 0),
+ (2497, 2500, 0),
+ (2509, 2509, 0),
+ (2530, 2531, 0),
+ (2558, 2558, 0),
+ (2561, 2562, 0),
+ (2620, 2620, 0),
+ (2625, 2626, 0),
+ (2631, 2632, 0),
+ (2635, 2637, 0),
+ (2641, 2641, 0),
+ (2672, 2673, 0),
+ (2677, 2677, 0),
+ (2689, 2690, 0),
+ (2748, 2748, 0),
+ (2753, 2757, 0),
+ (2759, 2760, 0),
+ (2765, 2765, 0),
+ (2786, 2787, 0),
+ (2810, 2815, 0),
+ (2817, 2817, 0),
+ (2876, 2876, 0),
+ (2879, 2879, 0),
+ (2881, 2884, 0),
+ (2893, 2893, 0),
+ (2901, 2902, 0),
+ (2914, 2915, 0),
+ (2946, 2946, 0),
+ (3008, 3008, 0),
+ (3021, 3021, 0),
+ (3072, 3072, 0),
+ (3076, 3076, 0),
+ (3132, 3132, 0),
+ (3134, 3136, 0),
+ (3142, 3144, 0),
+ (3146, 3149, 0),
+ (3157, 3158, 0),
+ (3170, 3171, 0),
+ (3201, 3201, 0),
+ (3260, 3260, 0),
+ (3263, 3263, 0),
+ (3270, 3270, 0),
+ (3276, 3277, 0),
+ (3298, 3299, 0),
+ (3328, 3329, 0),
+ (3387, 3388, 0),
+ (3393, 3396, 0),
+ (3405, 3405, 0),
+ (3426, 3427, 0),
+ (3457, 3457, 0),
+ (3530, 3530, 0),
+ (3538, 3540, 0),
+ (3542, 3542, 0),
+ (3633, 3633, 0),
+ (3636, 3642, 0),
+ (3655, 3662, 0),
+ (3761, 3761, 0),
+ (3764, 3772, 0),
+ (3784, 3790, 0),
+ (3864, 3865, 0),
+ (3893, 3893, 0),
+ (3895, 3895, 0),
+ (3897, 3897, 0),
+ (3953, 3966, 0),
+ (3968, 3972, 0),
+ (3974, 3975, 0),
+ (3981, 3991, 0),
+ (3993, 4028, 0),
+ (4038, 4038, 0),
+ (4141, 4144, 0),
+ (4146, 4151, 0),
+ (4153, 4154, 0),
+ (4157, 4158, 0),
+ (4184, 4185, 0),
+ (4190, 4192, 0),
+ (4209, 4212, 0),
+ (4226, 4226, 0),
+ (4229, 4230, 0),
+ (4237, 4237, 0),
+ (4253, 4253, 0),
+ (4352, 4447, 2),
+ (4957, 4959, 0),
+ (5906, 5908, 0),
+ (5938, 5939, 0),
+ (5970, 5971, 0),
+ (6002, 6003, 0),
+ (6068, 6069, 0),
+ (6071, 6077, 0),
+ (6086, 6086, 0),
+ (6089, 6099, 0),
+ (6109, 6109, 0),
+ (6155, 6157, 0),
+ (6159, 6159, 0),
+ (6277, 6278, 0),
+ (6313, 6313, 0),
+ (6432, 6434, 0),
+ (6439, 6440, 0),
+ (6450, 6450, 0),
+ (6457, 6459, 0),
+ (6679, 6680, 0),
+ (6683, 6683, 0),
+ (6742, 6742, 0),
+ (6744, 6750, 0),
+ (6752, 6752, 0),
+ (6754, 6754, 0),
+ (6757, 6764, 0),
+ (6771, 6780, 0),
+ (6783, 6783, 0),
+ (6832, 6862, 0),
+ (6912, 6915, 0),
+ (6964, 6964, 0),
+ (6966, 6970, 0),
+ (6972, 6972, 0),
+ (6978, 6978, 0),
+ (7019, 7027, 0),
+ (7040, 7041, 0),
+ (7074, 7077, 0),
+ (7080, 7081, 0),
+ (7083, 7085, 0),
+ (7142, 7142, 0),
+ (7144, 7145, 0),
+ (7149, 7149, 0),
+ (7151, 7153, 0),
+ (7212, 7219, 0),
+ (7222, 7223, 0),
+ (7376, 7378, 0),
+ (7380, 7392, 0),
+ (7394, 7400, 0),
+ (7405, 7405, 0),
+ (7412, 7412, 0),
+ (7416, 7417, 0),
+ (7616, 7679, 0),
+ (8203, 8207, 0),
+ (8232, 8238, 0),
+ (8288, 8291, 0),
+ (8400, 8432, 0),
+ (8986, 8987, 2),
+ (9001, 9002, 2),
+ (9193, 9196, 2),
+ (9200, 9200, 2),
+ (9203, 9203, 2),
+ (9725, 9726, 2),
+ (9748, 9749, 2),
+ (9800, 9811, 2),
+ (9855, 9855, 2),
+ (9875, 9875, 2),
+ (9889, 9889, 2),
+ (9898, 9899, 2),
+ (9917, 9918, 2),
+ (9924, 9925, 2),
+ (9934, 9934, 2),
+ (9940, 9940, 2),
+ (9962, 9962, 2),
+ (9970, 9971, 2),
+ (9973, 9973, 2),
+ (9978, 9978, 2),
+ (9981, 9981, 2),
+ (9989, 9989, 2),
+ (9994, 9995, 2),
+ (10024, 10024, 2),
+ (10060, 10060, 2),
+ (10062, 10062, 2),
+ (10067, 10069, 2),
+ (10071, 10071, 2),
+ (10133, 10135, 2),
+ (10160, 10160, 2),
+ (10175, 10175, 2),
+ (11035, 11036, 2),
+ (11088, 11088, 2),
+ (11093, 11093, 2),
+ (11503, 11505, 0),
+ (11647, 11647, 0),
+ (11744, 11775, 0),
+ (11904, 11929, 2),
+ (11931, 12019, 2),
+ (12032, 12245, 2),
+ (12272, 12283, 2),
+ (12288, 12329, 2),
+ (12330, 12333, 0),
+ (12334, 12350, 2),
+ (12353, 12438, 2),
+ (12441, 12442, 0),
+ (12443, 12543, 2),
+ (12549, 12591, 2),
+ (12593, 12686, 2),
+ (12688, 12771, 2),
+ (12784, 12830, 2),
+ (12832, 12871, 2),
+ (12880, 19903, 2),
+ (19968, 42124, 2),
+ (42128, 42182, 2),
+ (42607, 42610, 0),
+ (42612, 42621, 0),
+ (42654, 42655, 0),
+ (42736, 42737, 0),
+ (43010, 43010, 0),
+ (43014, 43014, 0),
+ (43019, 43019, 0),
+ (43045, 43046, 0),
+ (43052, 43052, 0),
+ (43204, 43205, 0),
+ (43232, 43249, 0),
+ (43263, 43263, 0),
+ (43302, 43309, 0),
+ (43335, 43345, 0),
+ (43360, 43388, 2),
+ (43392, 43394, 0),
+ (43443, 43443, 0),
+ (43446, 43449, 0),
+ (43452, 43453, 0),
+ (43493, 43493, 0),
+ (43561, 43566, 0),
+ (43569, 43570, 0),
+ (43573, 43574, 0),
+ (43587, 43587, 0),
+ (43596, 43596, 0),
+ (43644, 43644, 0),
+ (43696, 43696, 0),
+ (43698, 43700, 0),
+ (43703, 43704, 0),
+ (43710, 43711, 0),
+ (43713, 43713, 0),
+ (43756, 43757, 0),
+ (43766, 43766, 0),
+ (44005, 44005, 0),
+ (44008, 44008, 0),
+ (44013, 44013, 0),
+ (44032, 55203, 2),
+ (63744, 64255, 2),
+ (64286, 64286, 0),
+ (65024, 65039, 0),
+ (65040, 65049, 2),
+ (65056, 65071, 0),
+ (65072, 65106, 2),
+ (65108, 65126, 2),
+ (65128, 65131, 2),
+ (65281, 65376, 2),
+ (65504, 65510, 2),
+ (66045, 66045, 0),
+ (66272, 66272, 0),
+ (66422, 66426, 0),
+ (68097, 68099, 0),
+ (68101, 68102, 0),
+ (68108, 68111, 0),
+ (68152, 68154, 0),
+ (68159, 68159, 0),
+ (68325, 68326, 0),
+ (68900, 68903, 0),
+ (69291, 69292, 0),
+ (69373, 69375, 0),
+ (69446, 69456, 0),
+ (69506, 69509, 0),
+ (69633, 69633, 0),
+ (69688, 69702, 0),
+ (69744, 69744, 0),
+ (69747, 69748, 0),
+ (69759, 69761, 0),
+ (69811, 69814, 0),
+ (69817, 69818, 0),
+ (69826, 69826, 0),
+ (69888, 69890, 0),
+ (69927, 69931, 0),
+ (69933, 69940, 0),
+ (70003, 70003, 0),
+ (70016, 70017, 0),
+ (70070, 70078, 0),
+ (70089, 70092, 0),
+ (70095, 70095, 0),
+ (70191, 70193, 0),
+ (70196, 70196, 0),
+ (70198, 70199, 0),
+ (70206, 70206, 0),
+ (70209, 70209, 0),
+ (70367, 70367, 0),
+ (70371, 70378, 0),
+ (70400, 70401, 0),
+ (70459, 70460, 0),
+ (70464, 70464, 0),
+ (70502, 70508, 0),
+ (70512, 70516, 0),
+ (70712, 70719, 0),
+ (70722, 70724, 0),
+ (70726, 70726, 0),
+ (70750, 70750, 0),
+ (70835, 70840, 0),
+ (70842, 70842, 0),
+ (70847, 70848, 0),
+ (70850, 70851, 0),
+ (71090, 71093, 0),
+ (71100, 71101, 0),
+ (71103, 71104, 0),
+ (71132, 71133, 0),
+ (71219, 71226, 0),
+ (71229, 71229, 0),
+ (71231, 71232, 0),
+ (71339, 71339, 0),
+ (71341, 71341, 0),
+ (71344, 71349, 0),
+ (71351, 71351, 0),
+ (71453, 71455, 0),
+ (71458, 71461, 0),
+ (71463, 71467, 0),
+ (71727, 71735, 0),
+ (71737, 71738, 0),
+ (71995, 71996, 0),
+ (71998, 71998, 0),
+ (72003, 72003, 0),
+ (72148, 72151, 0),
+ (72154, 72155, 0),
+ (72160, 72160, 0),
+ (72193, 72202, 0),
+ (72243, 72248, 0),
+ (72251, 72254, 0),
+ (72263, 72263, 0),
+ (72273, 72278, 0),
+ (72281, 72283, 0),
+ (72330, 72342, 0),
+ (72344, 72345, 0),
+ (72752, 72758, 0),
+ (72760, 72765, 0),
+ (72767, 72767, 0),
+ (72850, 72871, 0),
+ (72874, 72880, 0),
+ (72882, 72883, 0),
+ (72885, 72886, 0),
+ (73009, 73014, 0),
+ (73018, 73018, 0),
+ (73020, 73021, 0),
+ (73023, 73029, 0),
+ (73031, 73031, 0),
+ (73104, 73105, 0),
+ (73109, 73109, 0),
+ (73111, 73111, 0),
+ (73459, 73460, 0),
+ (73472, 73473, 0),
+ (73526, 73530, 0),
+ (73536, 73536, 0),
+ (73538, 73538, 0),
+ (78912, 78912, 0),
+ (78919, 78933, 0),
+ (92912, 92916, 0),
+ (92976, 92982, 0),
+ (94031, 94031, 0),
+ (94095, 94098, 0),
+ (94176, 94179, 2),
+ (94180, 94180, 0),
+ (94192, 94193, 2),
+ (94208, 100343, 2),
+ (100352, 101589, 2),
+ (101632, 101640, 2),
+ (110576, 110579, 2),
+ (110581, 110587, 2),
+ (110589, 110590, 2),
+ (110592, 110882, 2),
+ (110898, 110898, 2),
+ (110928, 110930, 2),
+ (110933, 110933, 2),
+ (110948, 110951, 2),
+ (110960, 111355, 2),
+ (113821, 113822, 0),
+ (118528, 118573, 0),
+ (118576, 118598, 0),
+ (119143, 119145, 0),
+ (119163, 119170, 0),
+ (119173, 119179, 0),
+ (119210, 119213, 0),
+ (119362, 119364, 0),
+ (121344, 121398, 0),
+ (121403, 121452, 0),
+ (121461, 121461, 0),
+ (121476, 121476, 0),
+ (121499, 121503, 0),
+ (121505, 121519, 0),
+ (122880, 122886, 0),
+ (122888, 122904, 0),
+ (122907, 122913, 0),
+ (122915, 122916, 0),
+ (122918, 122922, 0),
+ (123023, 123023, 0),
+ (123184, 123190, 0),
+ (123566, 123566, 0),
+ (123628, 123631, 0),
+ (124140, 124143, 0),
+ (125136, 125142, 0),
+ (125252, 125258, 0),
+ (126980, 126980, 2),
+ (127183, 127183, 2),
+ (127374, 127374, 2),
+ (127377, 127386, 2),
+ (127488, 127490, 2),
+ (127504, 127547, 2),
+ (127552, 127560, 2),
+ (127568, 127569, 2),
+ (127584, 127589, 2),
+ (127744, 127776, 2),
+ (127789, 127797, 2),
+ (127799, 127868, 2),
+ (127870, 127891, 2),
+ (127904, 127946, 2),
+ (127951, 127955, 2),
+ (127968, 127984, 2),
+ (127988, 127988, 2),
+ (127992, 128062, 2),
+ (128064, 128064, 2),
+ (128066, 128252, 2),
+ (128255, 128317, 2),
+ (128331, 128334, 2),
+ (128336, 128359, 2),
+ (128378, 128378, 2),
+ (128405, 128406, 2),
+ (128420, 128420, 2),
+ (128507, 128591, 2),
+ (128640, 128709, 2),
+ (128716, 128716, 2),
+ (128720, 128722, 2),
+ (128725, 128727, 2),
+ (128732, 128735, 2),
+ (128747, 128748, 2),
+ (128756, 128764, 2),
+ (128992, 129003, 2),
+ (129008, 129008, 2),
+ (129292, 129338, 2),
+ (129340, 129349, 2),
+ (129351, 129535, 2),
+ (129648, 129660, 2),
+ (129664, 129672, 2),
+ (129680, 129725, 2),
+ (129727, 129733, 2),
+ (129742, 129755, 2),
+ (129760, 129768, 2),
+ (129776, 129784, 2),
+ (131072, 196605, 2),
+ (196608, 262141, 2),
+ (917760, 917999, 0),
+]
diff --git a/py311/lib/python3.11/site-packages/black/brackets.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/brackets.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..eec539ea5d7ee9d75cafb191d7f4c1dd0577d7e0
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/brackets.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/brackets.py b/py311/lib/python3.11/site-packages/black/brackets.py
new file mode 100644
index 0000000000000000000000000000000000000000..44a3c9a2946fe65762dc41f5697f3a48a99f6ff5
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/brackets.py
@@ -0,0 +1,383 @@
+"""Builds on top of nodes.py to track brackets."""
+
+from collections.abc import Iterable, Sequence
+from dataclasses import dataclass, field
+from typing import Final, Union
+
+from black.nodes import (
+ BRACKET,
+ CLOSING_BRACKETS,
+ COMPARATORS,
+ LOGIC_OPERATORS,
+ MATH_OPERATORS,
+ OPENING_BRACKETS,
+ UNPACKING_PARENTS,
+ VARARGS_PARENTS,
+ is_vararg,
+ syms,
+)
+from blib2to3.pgen2 import token
+from blib2to3.pytree import Leaf, Node
+
+# types
+LN = Union[Leaf, Node]
+Depth = int
+LeafID = int
+NodeType = int
+Priority = int
+
+
+# Delimiter split priorities, consumed via BracketTracker.delimiters by the
+# is_split_*_delimiter() functions below. Higher numbers are higher priority,
+# i.e. the line is split there first.
+COMPREHENSION_PRIORITY: Final = 20
+COMMA_PRIORITY: Final = 18
+TERNARY_PRIORITY: Final = 16
+LOGIC_PRIORITY: Final = 14
+STRING_PRIORITY: Final = 12
+COMPARATOR_PRIORITY: Final = 10
+# Per-token priorities for math/bitwise operators: looser-binding operators
+# get higher split priority (e.g. `|` above `**`).
+MATH_PRIORITIES: Final = {
+    token.VBAR: 9,
+    token.CIRCUMFLEX: 8,
+    token.AMPER: 7,
+    token.LEFTSHIFT: 6,
+    token.RIGHTSHIFT: 6,
+    token.PLUS: 5,
+    token.MINUS: 5,
+    token.STAR: 4,
+    token.SLASH: 4,
+    token.DOUBLESLASH: 4,
+    token.PERCENT: 4,
+    token.AT: 4,
+    token.TILDE: 3,
+    token.DOUBLESTAR: 2,
+}
+# Splitting on attribute access (`.`) is the last resort.
+DOT_PRIORITY: Final = 1
+
+
+class BracketMatchError(Exception):
+    """Raised when an opening bracket is unable to be matched to a closing bracket."""
+
+
+@dataclass
+class BracketTracker:
+    """Keeps track of brackets on a line."""
+
+    # Current nesting depth; incremented on opening brackets (and inside
+    # `for ... in` / `lambda ...:` spans, see the maybe_* helpers below).
+    depth: int = 0
+    # Maps (depth, closing-bracket type) to the still-unmatched opening leaf.
+    bracket_match: dict[tuple[Depth, NodeType], Leaf] = field(default_factory=dict)
+    # id() of each depth-0 delimiter leaf mapped to its split priority.
+    delimiters: dict[LeafID, Priority] = field(default_factory=dict)
+    # The previously marked leaf, used for split-before delimiter decisions.
+    previous: Leaf | None = None
+    _for_loop_depths: list[int] = field(default_factory=list)
+    _lambda_argument_depths: list[int] = field(default_factory=list)
+    # Brackets with an empty value (invisible parentheses) seen so far.
+    invisible: list[Leaf] = field(default_factory=list)
+
+    def mark(self, leaf: Leaf) -> None:
+        """Mark `leaf` with bracket-related metadata. Keep track of delimiters.
+
+        All leaves receive an int `bracket_depth` field that stores how deep
+        within brackets a given leaf is. 0 means there are no enclosing brackets
+        that started on this line.
+
+        If a leaf is itself a closing bracket and there is a matching opening
+        bracket earlier, it receives an `opening_bracket` field with which it forms a
+        pair. This is a one-directional link to avoid reference cycles. Closing
+        bracket without opening happens on lines continued from previous
+        breaks, e.g. `) -> "ReturnType":` as part of a funcdef where we place
+        the return type annotation on its own line of the previous closing RPAR.
+
+        If a leaf is a delimiter (a token on which Black can split the line if
+        needed) and it's on depth 0, its `id()` is stored in the tracker's
+        `delimiters` field.
+        """
+        if leaf.type == token.COMMENT:
+            return
+
+        # A closing bracket at depth 0 with no recorded opener belongs to a
+        # bracket pair started on a previous line (see docstring above): skip it.
+        if (
+            self.depth == 0
+            and leaf.type in CLOSING_BRACKETS
+            and (self.depth, leaf.type) not in self.bracket_match
+        ):
+            return
+
+        self.maybe_decrement_after_for_loop_variable(leaf)
+        self.maybe_decrement_after_lambda_arguments(leaf)
+        if leaf.type in CLOSING_BRACKETS:
+            self.depth -= 1
+            try:
+                opening_bracket = self.bracket_match.pop((self.depth, leaf.type))
+            except KeyError as e:
+                raise BracketMatchError(
+                    "Unable to match a closing bracket to the following opening"
+                    f" bracket: {leaf}"
+                ) from e
+            leaf.opening_bracket = opening_bracket
+            if not leaf.value:
+                self.invisible.append(leaf)
+        leaf.bracket_depth = self.depth
+        if self.depth == 0:
+            # Split-before delimiters attach to the PREVIOUS leaf (the break
+            # happens before `leaf`, i.e. after `self.previous`).
+            delim = is_split_before_delimiter(leaf, self.previous)
+            if delim and self.previous is not None:
+                self.delimiters[id(self.previous)] = delim
+            else:
+                delim = is_split_after_delimiter(leaf)
+                if delim:
+                    self.delimiters[id(leaf)] = delim
+        if leaf.type in OPENING_BRACKETS:
+            self.bracket_match[self.depth, BRACKET[leaf.type]] = leaf
+            self.depth += 1
+            if not leaf.value:
+                self.invisible.append(leaf)
+        self.previous = leaf
+        self.maybe_increment_lambda_arguments(leaf)
+        self.maybe_increment_for_loop_variable(leaf)
+
+    def any_open_for_or_lambda(self) -> bool:
+        """Return True if there is an open for or lambda expression on the line.
+
+        See maybe_increment_for_loop_variable and maybe_increment_lambda_arguments
+        for details."""
+        return bool(self._for_loop_depths or self._lambda_argument_depths)
+
+    def any_open_brackets(self) -> bool:
+        """Return True if there is an yet unmatched open bracket on the line."""
+        return bool(self.bracket_match)
+
+    def max_delimiter_priority(self, exclude: Iterable[LeafID] = ()) -> Priority:
+        """Return the highest priority of a delimiter found on the line.
+
+        Values are consistent with what `is_split_*_delimiter()` return.
+        Raises ValueError on no delimiters.
+        """
+        return max(v for k, v in self.delimiters.items() if k not in exclude)
+
+    def delimiter_count_with_priority(self, priority: Priority = 0) -> int:
+        """Return the number of delimiters with the given `priority`.
+
+        If no `priority` is passed, defaults to max priority on the line.
+        """
+        if not self.delimiters:
+            return 0
+
+        priority = priority or self.max_delimiter_priority()
+        return sum(1 for p in self.delimiters.values() if p == priority)
+
+    def maybe_increment_for_loop_variable(self, leaf: Leaf) -> bool:
+        """In a for loop, or comprehension, the variables are often unpacks.
+
+        To avoid splitting on the comma in this situation, increase the depth of
+        tokens between `for` and `in`.
+        """
+        if leaf.type == token.NAME and leaf.value == "for":
+            self.depth += 1
+            self._for_loop_depths.append(self.depth)
+            return True
+
+        return False
+
+    def maybe_decrement_after_for_loop_variable(self, leaf: Leaf) -> bool:
+        """See `maybe_increment_for_loop_variable` above for explanation."""
+        if (
+            self._for_loop_depths
+            and self._for_loop_depths[-1] == self.depth
+            and leaf.type == token.NAME
+            and leaf.value == "in"
+        ):
+            self.depth -= 1
+            self._for_loop_depths.pop()
+            return True
+
+        return False
+
+    def maybe_increment_lambda_arguments(self, leaf: Leaf) -> bool:
+        """In a lambda expression, there might be more than one argument.
+
+        To avoid splitting on the comma in this situation, increase the depth of
+        tokens between `lambda` and `:`.
+        """
+        if leaf.type == token.NAME and leaf.value == "lambda":
+            self.depth += 1
+            self._lambda_argument_depths.append(self.depth)
+            return True
+
+        return False
+
+    def maybe_decrement_after_lambda_arguments(self, leaf: Leaf) -> bool:
+        """See `maybe_increment_lambda_arguments` above for explanation."""
+        if (
+            self._lambda_argument_depths
+            and self._lambda_argument_depths[-1] == self.depth
+            and leaf.type == token.COLON
+        ):
+            self.depth -= 1
+            self._lambda_argument_depths.pop()
+            return True
+
+        return False
+
+    def get_open_lsqb(self) -> Leaf | None:
+        """Return the most recent opening square bracket (if any)."""
+        return self.bracket_match.get((self.depth - 1, token.RSQB))
+
+
+def is_split_after_delimiter(leaf: Leaf) -> Priority:
+ """Return the priority of the `leaf` delimiter, given a line break after it.
+
+ The delimiter priorities returned here are from those delimiters that would
+ cause a line break after themselves.
+
+ Higher numbers are higher priority.
+ """
+ if leaf.type == token.COMMA:
+ return COMMA_PRIORITY
+
+ return 0
+
+
+def is_split_before_delimiter(leaf: Leaf, previous: Leaf | None = None) -> Priority:
+    """Return the priority of the `leaf` delimiter, given a line break before it.
+
+    The delimiter priorities returned here are from those delimiters that would
+    cause a line break before themselves.
+
+    Higher numbers are higher priority.
+    """
+    if is_vararg(leaf, within=VARARGS_PARENTS | UNPACKING_PARENTS):
+        # * and ** might also be MATH_OPERATORS but in this case they are not.
+        # Don't treat them as a delimiter.
+        return 0
+
+    # Attribute access: only split before `.` at the start of a line-continued
+    # chain (previous leaf is a closing bracket) or at line start.
+    if (
+        leaf.type == token.DOT
+        and leaf.parent
+        and leaf.parent.type not in {syms.import_from, syms.dotted_name}
+        and (previous is None or previous.type in CLOSING_BRACKETS)
+    ):
+        return DOT_PRIORITY
+
+    # Math operators, except unary (`factor`) and star-expressions.
+    if (
+        leaf.type in MATH_OPERATORS
+        and leaf.parent
+        and leaf.parent.type not in {syms.factor, syms.star_expr}
+    ):
+        return MATH_PRIORITIES[leaf.type]
+
+    if leaf.type in COMPARATORS:
+        return COMPARATOR_PRIORITY
+
+    # Implicit string concatenation: two adjacent STRING tokens.
+    if (
+        leaf.type == token.STRING
+        and previous is not None
+        and previous.type == token.STRING
+    ):
+        return STRING_PRIORITY
+
+    # Everything below concerns keyword-like leaves only.
+    if leaf.type not in {token.NAME, token.ASYNC}:
+        return 0
+
+    # NOTE: `and` binds tighter than `or`, so this matches a comprehension
+    # `for` OR any ASYNC leaf.
+    if (
+        leaf.value == "for"
+        and leaf.parent
+        and leaf.parent.type in {syms.comp_for, syms.old_comp_for}
+        or leaf.type == token.ASYNC
+    ):
+        # In `async for`, only the `async` leaf gets the priority so the pair
+        # is not counted as two delimiters.
+        if (
+            not isinstance(leaf.prev_sibling, Leaf)
+            or leaf.prev_sibling.value != "async"
+        ):
+            return COMPREHENSION_PRIORITY
+
+    if (
+        leaf.value == "if"
+        and leaf.parent
+        and leaf.parent.type in {syms.comp_if, syms.old_comp_if}
+    ):
+        return COMPREHENSION_PRIORITY
+
+    # Conditional expression (`a if cond else b`).
+    if leaf.value in {"if", "else"} and leaf.parent and leaf.parent.type == syms.test:
+        return TERNARY_PRIORITY
+
+    if leaf.value == "is":
+        return COMPARATOR_PRIORITY
+
+    # `in`, but not the `in` of `not in` (the `not` leaf carries the priority).
+    if (
+        leaf.value == "in"
+        and leaf.parent
+        and leaf.parent.type in {syms.comp_op, syms.comparison}
+        and not (
+            previous is not None
+            and previous.type == token.NAME
+            and previous.value == "not"
+        )
+    ):
+        return COMPARATOR_PRIORITY
+
+    # `not`, but not the `not` of `is not` (the `is` leaf carries the priority).
+    if (
+        leaf.value == "not"
+        and leaf.parent
+        and leaf.parent.type == syms.comp_op
+        and not (
+            previous is not None
+            and previous.type == token.NAME
+            and previous.value == "is"
+        )
+    ):
+        return COMPARATOR_PRIORITY
+
+    if leaf.value in LOGIC_OPERATORS and leaf.parent:
+        return LOGIC_PRIORITY
+
+    return 0
+
+
+def max_delimiter_priority_in_atom(node: LN) -> Priority:
+ """Return maximum delimiter priority inside `node`.
+
+ This is specific to atoms with contents contained in a pair of parentheses.
+ If `node` isn't an atom or there are no enclosing parentheses, returns 0.
+ """
+ if node.type != syms.atom:
+ return 0
+
+ first = node.children[0]
+ last = node.children[-1]
+ if not (first.type == token.LPAR and last.type == token.RPAR):
+ return 0
+
+ bt = BracketTracker()
+ for c in node.children[1:-1]:
+ if isinstance(c, Leaf):
+ bt.mark(c)
+ else:
+ for leaf in c.leaves():
+ bt.mark(leaf)
+ try:
+ return bt.max_delimiter_priority()
+
+ except ValueError:
+ return 0
+
+
+def get_leaves_inside_matching_brackets(leaves: Sequence[Leaf]) -> set[LeafID]:
+ """Return leaves that are inside matching brackets.
+
+ The input `leaves` can have non-matching brackets at the head or tail parts.
+ Matching brackets are included.
+ """
+ try:
+ # Start with the first opening bracket and ignore closing brackets before.
+ start_index = next(
+ i for i, l in enumerate(leaves) if l.type in OPENING_BRACKETS
+ )
+ except StopIteration:
+ return set()
+ bracket_stack = []
+ ids = set()
+ for i in range(start_index, len(leaves)):
+ leaf = leaves[i]
+ if leaf.type in OPENING_BRACKETS:
+ bracket_stack.append((BRACKET[leaf.type], i))
+ if leaf.type in CLOSING_BRACKETS:
+ if bracket_stack and leaf.type == bracket_stack[-1][0]:
+ _, start = bracket_stack.pop()
+ for j in range(start, i + 1):
+ ids.add(id(leaves[j]))
+ else:
+ break
+ return ids
diff --git a/py311/lib/python3.11/site-packages/black/cache.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/cache.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..2aef25e1277f5dfb8fe7256257a7407ac6550574
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/cache.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/cache.py b/py311/lib/python3.11/site-packages/black/cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef9d99a7b901c772c4076b8b13c030d325900c61
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/cache.py
@@ -0,0 +1,150 @@
+"""Caching of formatted files with feature-based invalidation."""
+
+import hashlib
+import os
+import pickle
+import sys
+import tempfile
+from collections.abc import Iterable
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import NamedTuple
+
+from platformdirs import user_cache_dir
+
+from _black_version import version as __version__
+from black.mode import Mode
+from black.output import err
+
+if sys.version_info >= (3, 11):
+ from typing import Self
+else:
+ from typing_extensions import Self
+
+
+class FileData(NamedTuple):
+    """Cached stat() metadata plus content hash for a formatted file."""
+
+    st_mtime: float  # modification time recorded when the file was cached
+    st_size: int  # file size in bytes
+    hash: str  # sha256 hex digest of the file contents (see Cache.hash_digest)
+
+
+def get_cache_dir() -> Path:
+ """Get the cache directory used by black.
+
+ Users can customize this directory on all systems using `BLACK_CACHE_DIR`
+ environment variable. By default, the cache directory is the user cache directory
+ under the black application.
+
+ This result is immediately set to a constant `black.cache.CACHE_DIR` as to avoid
+ repeated calls.
+ """
+ # NOTE: Function mostly exists as a clean way to test getting the cache directory.
+ default_cache_dir = user_cache_dir("black")
+ cache_dir = Path(os.environ.get("BLACK_CACHE_DIR", default_cache_dir))
+ cache_dir = cache_dir / __version__
+ return cache_dir
+
+
+# Computed once at import time; see get_cache_dir() for override semantics.
+CACHE_DIR = get_cache_dir()
+
+
+def get_cache_file(mode: Mode) -> Path:
+    """Return the cache file path for the given `mode` (one file per cache key)."""
+    return CACHE_DIR / f"cache.{mode.get_cache_key()}.pickle"
+
+
+@dataclass
+class Cache:
+    """On-disk cache of already-formatted files for a given `Mode`.
+
+    A file is considered unchanged when its size matches and either its mtime
+    or its sha256 digest matches the cached entry. All disk errors while
+    reading/writing the cache are treated as "no cache" (best-effort).
+    """
+
+    mode: Mode  # the formatting mode this cache is keyed on
+    cache_file: Path  # pickle file backing this cache
+    file_data: dict[str, FileData] = field(default_factory=dict)  # resolved path -> FileData
+
+    @classmethod
+    def read(cls, mode: Mode) -> Self:
+        """Read the cache if it exists and is well-formed.
+
+        If it is not well-formed, the call to write later should
+        resolve the issue.
+        """
+        cache_file = get_cache_file(mode)
+        try:
+            exists = cache_file.exists()
+        except OSError as e:
+            # Likely file too long; see #4172 and #4174
+            err(f"Unable to read cache file {cache_file} due to {e}")
+            return cls(mode, cache_file)
+        if not exists:
+            return cls(mode, cache_file)
+
+        with cache_file.open("rb") as fobj:
+            try:
+                # The pickle stores raw tuples (see write()); rebuild FileData.
+                data: dict[str, tuple[float, int, str]] = pickle.load(fobj)
+                file_data = {k: FileData(*v) for k, v in data.items()}
+            except (pickle.UnpicklingError, ValueError, IndexError):
+                # Corrupt or stale-format cache: start empty.
+                return cls(mode, cache_file)
+
+        return cls(mode, cache_file, file_data)
+
+    @staticmethod
+    def hash_digest(path: Path) -> str:
+        """Return hash digest for path."""
+
+        data = path.read_bytes()
+        return hashlib.sha256(data).hexdigest()
+
+    @staticmethod
+    def get_file_data(path: Path) -> FileData:
+        """Return file data for path."""
+
+        stat = path.stat()
+        hash = Cache.hash_digest(path)
+        return FileData(stat.st_mtime, stat.st_size, hash)
+
+    def is_changed(self, source: Path) -> bool:
+        """Check if source has changed compared to cached version."""
+        res_src = source.resolve()
+        old = self.file_data.get(str(res_src))
+        if old is None:
+            return True
+
+        # Cheap checks first: size, then mtime; hash only when mtime differs.
+        st = res_src.stat()
+        if st.st_size != old.st_size:
+            return True
+        if st.st_mtime != old.st_mtime:
+            new_hash = Cache.hash_digest(res_src)
+            if new_hash != old.hash:
+                return True
+        return False
+
+    def filtered_cached(self, sources: Iterable[Path]) -> tuple[set[Path], set[Path]]:
+        """Split an iterable of paths in `sources` into two sets.
+
+        The first contains paths of files that modified on disk or are not in the
+        cache. The other contains paths to non-modified files.
+        """
+        changed: set[Path] = set()
+        done: set[Path] = set()
+        for src in sources:
+            if self.is_changed(src):
+                changed.add(src)
+            else:
+                done.add(src)
+        return changed, done
+
+    def write(self, sources: Iterable[Path]) -> None:
+        """Update the cache file data and write a new cache file."""
+        self.file_data.update(
+            **{str(src.resolve()): Cache.get_file_data(src) for src in sources}
+        )
+        try:
+            CACHE_DIR.mkdir(parents=True, exist_ok=True)
+            # Write to a temp file in the same directory, then os.replace() it
+            # into place so readers never see a partially-written cache.
+            with tempfile.NamedTemporaryFile(
+                dir=str(self.cache_file.parent), delete=False
+            ) as f:
+                # We store raw tuples in the cache because it's faster.
+                data: dict[str, tuple[float, int, str]] = {
+                    k: (*v,) for k, v in self.file_data.items()
+                }
+                pickle.dump(data, f, protocol=4)
+            os.replace(f.name, self.cache_file)
+        except OSError:
+            # Cache writing is best-effort; formatting must not fail here.
+            pass
diff --git a/py311/lib/python3.11/site-packages/black/comments.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/comments.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..a647e1c179494dae2e5f093ed853c8d6c19a3812
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/comments.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/comments.py b/py311/lib/python3.11/site-packages/black/comments.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4c8f799b9737f3dbe5770d6d5486e2b813f19b8
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/comments.py
@@ -0,0 +1,839 @@
+import re
+from collections.abc import Collection, Iterator
+from dataclasses import dataclass
+from functools import lru_cache
+from typing import Final, Union
+
+from black.mode import Mode, Preview
+from black.nodes import (
+ CLOSING_BRACKETS,
+ STANDALONE_COMMENT,
+ STATEMENT,
+ WHITESPACE,
+ container_of,
+ first_leaf_of,
+ is_type_comment_string,
+ make_simple_prefix,
+ preceding_leaf,
+ syms,
+)
+from blib2to3.pgen2 import token
+from blib2to3.pytree import Leaf, Node
+
# types
# Any node of the lib2to3 concrete syntax tree: a terminal (Leaf) or an
# interior node (Node).
LN = Union[Leaf, Node]

# Exact comment texts recognized as formatter directives (black's own plus
# yapf's, which black honors as aliases).
FMT_OFF: Final = {"# fmt: off", "# fmt:off", "# yapf: disable"}
FMT_SKIP: Final = {"# fmt: skip", "# fmt:skip"}
FMT_ON: Final = {"# fmt: on", "# fmt:on", "# yapf: enable"}

# Compound statements we care about for fmt: skip handling
# (excludes except_clause and case_block which aren't standalone compound statements)
_COMPOUND_STATEMENTS: Final = STATEMENT - {syms.except_clause, syms.case_block}

# Characters allowed to directly follow "#" without an inserted space
# (covers "##" banners, "#!" shebangs, "#:" markers, etc.).
COMMENT_EXCEPTIONS: Final = " !:#'"
# Conventions used when scanning one physical line for several comments:
# "# a # b" (prefix-separated) and "# a; b" (semicolon list).
_COMMENT_PREFIX: Final = "# "
_COMMENT_LIST_SEPARATOR: Final = ";"
+
+
@dataclass
class ProtoComment:
    """Describes a piece of syntax that is a comment.

    It's not a :class:`blib2to3.pytree.Leaf` so that:

    * it can be cached (`Leaf` objects should not be reused more than once as
      they store their lineno, column, prefix, and parent information);
    * `newlines` and `consumed` fields are kept separate from the `value`. This
      simplifies handling of special marker comments like ``# fmt: off/on``.

    Instances are plain value objects, which is what allows `list_comments`
    to memoize them with `lru_cache`.
    """

    type: int  # token.COMMENT or STANDALONE_COMMENT
    value: str  # content of the comment, normalized by make_comment()
    newlines: int  # how many newlines before the comment
    consumed: int  # how many characters of the original leaf's prefix did we consume
    form_feed: bool  # is there a form feed before the comment
    leading_whitespace: str  # leading whitespace before the comment, if any
+
+
def generate_comments(leaf: LN, mode: Mode) -> Iterator[Leaf]:
    """Clean the prefix of `leaf` and yield comment leaves parsed from it.

    lib2to3 shoves comments into the whitespace prefix of the following token
    (see `pgen2/driver.py:Driver.parse_tokens()`), which avoids growing the
    grammar but means comments don't "belong" anywhere.  We therefore yield
    simple parentless Leaf objects: token.COMMENT for inline (trailing)
    comments and the fake STANDALONE_COMMENT token for comments that occupy
    a line of their own.

    As a side effect, the portion of `leaf.prefix` that was consumed is
    normalized via normalize_trailing_prefix().
    """
    consumed_chars = 0
    at_endmarker = leaf.type == token.ENDMARKER
    for proto in list_comments(leaf.prefix, is_endmarker=at_endmarker, mode=mode):
        consumed_chars = proto.consumed
        yield Leaf(
            proto.type,
            proto.value,
            prefix=make_simple_prefix(proto.newlines, proto.form_feed),
        )
    normalize_trailing_prefix(leaf, consumed_chars)
+
+
@lru_cache(maxsize=4096)
def list_comments(prefix: str, *, is_endmarker: bool, mode: Mode) -> list[ProtoComment]:
    """Return a list of :class:`ProtoComment` objects parsed from the given `prefix`.

    `prefix` is the whitespace-and-comments text lib2to3 attaches to a token;
    `is_endmarker` is True when the prefix belongs to the ENDMARKER token, in
    which case no comment can be a trailing comment.  Memoized: safe because
    ProtoComment instances are immutable value objects.
    """
    result: list[ProtoComment] = []
    if not prefix or "#" not in prefix:
        return result

    consumed = 0  # characters of `prefix` consumed so far (ProtoComment.consumed)
    nlines = 0  # blank lines seen since the previous comment
    ignored_lines = 0  # backslash-continuation lines seen before the first comment
    form_feed = False
    # Split on any newline convention: \n, \r\n, or bare \r.
    for index, full_line in enumerate(re.split("\r?\n|\r", prefix)):
        consumed += len(full_line) + 1  # adding the length of the split '\n'
        match = re.match(r"^(\s*)(\S.*|)$", full_line)
        assert match
        whitespace, line = match.groups()
        if not line:
            nlines += 1
        if "\f" in full_line:
            form_feed = True
        if not line.startswith("#"):
            # Escaped newlines outside of a comment are not really newlines at
            # all. We treat a single-line comment following an escaped newline
            # as a simple trailing comment.
            if line.endswith("\\"):
                ignored_lines += 1
            continue

        # Only a comment on the first line (ignoring continuation lines) can
        # trail code; any comment after a real newline stands alone.
        if index == ignored_lines and not is_endmarker:
            comment_type = token.COMMENT  # simple trailing comment
        else:
            comment_type = STANDALONE_COMMENT
        comment = make_comment(line, mode=mode)
        result.append(
            ProtoComment(
                type=comment_type,
                value=comment,
                newlines=nlines,
                consumed=consumed,
                form_feed=form_feed,
                leading_whitespace=whitespace,
            )
        )
        # Reset per-comment state for the next comment in this prefix.
        form_feed = False
        nlines = 0
    return result
+
+
def normalize_trailing_prefix(leaf: LN, total_consumed: int) -> None:
    """Normalize whatever remains of `leaf.prefix` after comment extraction.

    Note: don't use backslashes for formatting or you'll lose your voting rights.
    """
    remainder = leaf.prefix[total_consumed:]
    if "\\" in remainder:
        # A leftover line continuation: drop the prefix entirely.
        leaf.prefix = ""
        return

    newline_count = remainder.count("\n")
    keep_form_feed = "\f" in remainder and remainder.endswith("\n")
    leaf.prefix = make_simple_prefix(newline_count, keep_form_feed)
+
+
def make_comment(content: str, mode: Mode) -> str:
    """Return a consistently formatted comment from the given `content` string.

    All comments (except for "##", "#!", "#:", '#'") get exactly one space
    between the hash sign and the content, and a hash sign is prepended if
    `content` did not start with one.

    Comments containing fmt directives are returned exactly as-is to respect
    user intent (e.g. `#no space # fmt: skip` stays untouched).
    """
    content = content.rstrip()
    if not content:
        return "#"

    # Preserve comments with fmt directives exactly as written.
    if content.startswith("#") and contains_fmt_directive(content):
        return content

    if content.startswith("#"):
        content = content[1:]

    if content and content[0] == "\N{NO-BREAK SPACE}":
        # Replace a leading NBSP with a plain space — except inside type
        # comments, where the exact text matters to type checkers.
        if not is_type_comment_string("# " + content.lstrip(), mode=mode):
            content = " " + content[1:]

    if Preview.standardize_type_comments in mode and content:
        if "\N{NO-BREAK SPACE}" not in content and is_type_comment_string(
            "#" + content, mode=mode
        ):
            # Normalize spacing around the colon of a type comment.
            type_part, value_part = content.split(":", 1)
            content = type_part.strip() + ": " + value_part.strip()

    if content and content[0] not in COMMENT_EXCEPTIONS:
        content = " " + content
    return "#" + content
+
+
def normalize_fmt_off(
    node: Node, mode: Mode, lines: Collection[tuple[int, int]]
) -> None:
    """Convert content between `# fmt: off`/`# fmt: on` into standalone comments.

    Repeats until no further off/on pair can be converted.
    """
    while convert_one_fmt_off_pair(node, mode, lines):
        pass
+
+
def _should_process_fmt_comment(
    comment: ProtoComment, leaf: Leaf
) -> tuple[bool, bool, bool]:
    """Decide whether `comment` participates in fmt handling.

    Returns a (should_process, is_fmt_off, is_fmt_skip) triple.
    """
    is_off = contains_fmt_directive(comment.value, FMT_OFF)
    is_skip = contains_fmt_directive(comment.value, FMT_SKIP)

    if not (is_off or is_skip):
        return False, False, False

    if is_off and leaf.type in CLOSING_BRACKETS:
        # `# fmt: off` applied directly before a closing bracket is invalid
        # usage; ignore it.
        return False, False, False

    return True, is_off, is_skip
+
+
def _is_valid_standalone_fmt_comment(
    comment: ProtoComment, leaf: Leaf, is_fmt_off: bool, is_fmt_skip: bool
) -> bool:
    """Check whether `comment` is usable as a standalone fmt directive.

    A comment already classified as STANDALONE_COMMENT always qualifies, as
    does one with nothing before it — that's a standalone comment in
    disguise.  Otherwise the preceding leaf decides: `# fmt: off` must follow
    whitespace (or a synthesized STANDALONE_COMMENT, treated as whitespace
    here), while `# fmt: skip` must NOT directly follow whitespace.
    """
    if comment.type == STANDALONE_COMMENT:
        return True

    previous = preceding_leaf(leaf)
    if previous is None:
        return True

    # Note: a single comment can carry both directives, so both conditions
    # are checked independently.
    if is_fmt_off and previous.type not in WHITESPACE:
        if previous.type != STANDALONE_COMMENT:
            return False
    if is_fmt_skip and previous.type in WHITESPACE:
        return False

    return True
+
+
def _handle_comment_only_fmt_block(
    leaf: Leaf,
    comment: ProtoComment,
    previous_consumed: int,
    mode: Mode,
) -> bool:
    """Handle fmt:off/on blocks that contain only comments.

    `leaf` is the first code leaf after the block — the whole fmt region
    lives inside its prefix.  `comment` is the triggering ProtoComment and
    `previous_consumed` is how much of the prefix was consumed before it.

    Returns True if a block was converted, False otherwise.
    """
    all_comments = list_comments(leaf.prefix, is_endmarker=False, mode=mode)

    # Find the first fmt:off and its matching fmt:on
    fmt_off_idx = None
    fmt_on_idx = None
    for idx, c in enumerate(all_comments):
        if fmt_off_idx is None and contains_fmt_directive(c.value, FMT_OFF):
            fmt_off_idx = idx
        if (
            fmt_off_idx is not None
            and idx > fmt_off_idx
            and contains_fmt_directive(c.value, FMT_ON)
        ):
            fmt_on_idx = idx
            break

    # Only proceed if we found both directives
    if fmt_on_idx is None or fmt_off_idx is None:
        return False

    comment = all_comments[fmt_off_idx]
    fmt_on_comment = all_comments[fmt_on_idx]
    original_prefix = leaf.prefix

    # Build the hidden value: the verbatim text from just after fmt:off up
    # to and including fmt:on, kept inside one STANDALONE_COMMENT leaf.
    start_pos = comment.consumed
    end_pos = fmt_on_comment.consumed
    content_between_and_fmt_on = original_prefix[start_pos:end_pos]
    hidden_value = comment.value + "\n" + content_between_and_fmt_on

    if hidden_value.endswith("\n"):
        hidden_value = hidden_value[:-1]

    # Build the standalone comment prefix - preserve all content before fmt:off
    # including any comments that precede it
    if fmt_off_idx == 0:
        # No comments before fmt:off, use previous_consumed
        pre_fmt_off_consumed = previous_consumed
    else:
        # Use the consumed position of the last comment before fmt:off
        # This preserves all comments and content before the fmt:off directive
        pre_fmt_off_consumed = all_comments[fmt_off_idx - 1].consumed

    standalone_comment_prefix = (
        original_prefix[:pre_fmt_off_consumed] + "\n" * comment.newlines
    )

    # Keep the indentation that preceded the fmt:off comment itself.
    fmt_off_prefix = original_prefix.split(comment.value)[0]
    if "\n" in fmt_off_prefix:
        fmt_off_prefix = fmt_off_prefix.split("\n")[-1]
    standalone_comment_prefix += fmt_off_prefix

    # Update leaf prefix: keep only what follows the fmt:on comment.
    leaf.prefix = original_prefix[fmt_on_comment.consumed :]

    # Insert the STANDALONE_COMMENT
    parent = leaf.parent
    assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (prefix only)"

    leaf_idx = None
    for idx, child in enumerate(parent.children):
        if child is leaf:
            leaf_idx = idx
            break

    assert leaf_idx is not None, "INTERNAL ERROR: fmt: on/off handling (leaf index)"

    parent.insert_child(
        leaf_idx,
        Leaf(
            STANDALONE_COMMENT,
            hidden_value,
            prefix=standalone_comment_prefix,
            fmt_pass_converted_first_leaf=None,
        ),
    )
    return True
+
+
def convert_one_fmt_off_pair(
    node: Node, mode: Mode, lines: Collection[tuple[int, int]]
) -> bool:
    """Convert content of a single `# fmt: off`/`# fmt: on` into a standalone comment.

    Scans the leaves of `node` for the first actionable fmt directive and
    collapses the region it controls into one STANDALONE_COMMENT leaf.

    Returns True if a pair was converted (the caller loops until False).
    """
    for leaf in node.leaves():
        # Skip STANDALONE_COMMENT nodes that were created by fmt:off/on/skip processing
        # to avoid reprocessing them in subsequent iterations
        if leaf.type == STANDALONE_COMMENT and hasattr(
            leaf, "fmt_pass_converted_first_leaf"
        ):
            continue

        previous_consumed = 0
        for comment in list_comments(leaf.prefix, is_endmarker=False, mode=mode):
            should_process, is_fmt_off, is_fmt_skip = _should_process_fmt_comment(
                comment, leaf
            )
            if not should_process:
                # Remember how far into the prefix we got; needed to build
                # the standalone comment's own prefix later.
                previous_consumed = comment.consumed
                continue

            if not _is_valid_standalone_fmt_comment(
                comment, leaf, is_fmt_off, is_fmt_skip
            ):
                previous_consumed = comment.consumed
                continue

            ignored_nodes = list(generate_ignored_nodes(leaf, comment, mode))

            # Handle comment-only blocks
            if not ignored_nodes and is_fmt_off:
                if _handle_comment_only_fmt_block(
                    leaf, comment, previous_consumed, mode
                ):
                    return True
                continue

            # Need actual nodes to process
            if not ignored_nodes:
                continue

            # Handle regular fmt blocks

            _handle_regular_fmt_block(
                ignored_nodes,
                comment,
                previous_consumed,
                is_fmt_skip,
                lines,
                leaf,
            )
            return True

    return False
+
+
def _handle_regular_fmt_block(
    ignored_nodes: list[LN],
    comment: ProtoComment,
    previous_consumed: int,
    is_fmt_skip: bool,
    lines: Collection[tuple[int, int]],
    leaf: Leaf,
) -> None:
    """Handle fmt blocks with actual AST nodes.

    Removes `ignored_nodes` from the tree and re-inserts their verbatim
    source text as a single STANDALONE_COMMENT leaf, so later formatting
    passes leave the region untouched.  `comment` is the triggering
    directive, `previous_consumed` the prefix offset consumed before it,
    `lines` the user-requested line ranges, and `leaf` the leaf carrying
    the directive.
    """
    first = ignored_nodes[0]  # Can be a container node with the `leaf`.
    parent = first.parent
    prefix = first.prefix

    if contains_fmt_directive(comment.value, FMT_OFF):
        # The directive itself lives in `first`'s prefix; strip it out.
        first.prefix = prefix[comment.consumed :]
    if is_fmt_skip:
        first.prefix = ""
        standalone_comment_prefix = prefix
    else:
        standalone_comment_prefix = prefix[:previous_consumed] + "\n" * comment.newlines

    # Ensure STANDALONE_COMMENT nodes have trailing newlines when stringified
    # This prevents multiple fmt: skip comments from being concatenated on one line
    parts = []
    for node in ignored_nodes:
        if isinstance(node, Leaf) and node.type == STANDALONE_COMMENT:
            # Add newline after STANDALONE_COMMENT Leaf
            node_str = str(node)
            if not node_str.endswith("\n"):
                node_str += "\n"
            parts.append(node_str)
        elif isinstance(node, Node):
            # For nodes that might contain STANDALONE_COMMENT leaves,
            # we need custom stringify
            has_standalone = any(
                leaf.type == STANDALONE_COMMENT for leaf in node.leaves()
            )
            if has_standalone:
                # Stringify node with STANDALONE_COMMENT leaves having trailing newlines
                def stringify_node(n: LN) -> str:
                    if isinstance(n, Leaf):
                        if n.type == STANDALONE_COMMENT:
                            result = n.prefix + n.value
                            if not result.endswith("\n"):
                                result += "\n"
                            return result
                        return str(n)
                    else:
                        # For nested nodes, recursively process children
                        return "".join(stringify_node(child) for child in n.children)

                parts.append(stringify_node(node))
            else:
                parts.append(str(node))
        else:
            parts.append(str(node))

    hidden_value = "".join(parts)
    comment_lineno = leaf.lineno - comment.newlines

    if contains_fmt_directive(comment.value, FMT_OFF):
        fmt_off_prefix = ""
        if len(lines) > 0 and not any(
            line[0] <= comment_lineno <= line[1] for line in lines
        ):
            # keeping indentation of comment by preserving original whitespaces.
            fmt_off_prefix = prefix.split(comment.value)[0]
            if "\n" in fmt_off_prefix:
                fmt_off_prefix = fmt_off_prefix.split("\n")[-1]
            standalone_comment_prefix += fmt_off_prefix
        hidden_value = comment.value + "\n" + hidden_value

    if is_fmt_skip:
        hidden_value += comment.leading_whitespace + comment.value

    if hidden_value.endswith("\n"):
        # That happens when one of the `ignored_nodes` ended with a NEWLINE
        # leaf (possibly followed by a DEDENT).
        hidden_value = hidden_value[:-1]

    first_idx: int | None = None
    for ignored in ignored_nodes:
        index = ignored.remove()
        if first_idx is None:
            first_idx = index

    assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (1)"
    assert first_idx is not None, "INTERNAL ERROR: fmt: on/off handling (2)"

    parent.insert_child(
        first_idx,
        Leaf(
            STANDALONE_COMMENT,
            hidden_value,
            prefix=standalone_comment_prefix,
            fmt_pass_converted_first_leaf=first_leaf_of(first),
        ),
    )
+
+
def generate_ignored_nodes(
    leaf: Leaf, comment: ProtoComment, mode: Mode
) -> Iterator[LN]:
    """Starting from the container of `leaf`, generate all leaves until `# fmt: on`.

    If `comment` is a `# fmt: skip` directive, delegates to the dedicated
    handler, which yields only the nodes on the directive's own line.
    Stops at the end of the block.
    """
    if contains_fmt_directive(comment.value, FMT_SKIP):
        yield from _generate_ignored_nodes_from_fmt_skip(leaf, comment, mode)
        return
    container: LN | None = container_of(leaf)
    while container is not None and container.type != token.ENDMARKER:
        if is_fmt_on(container, mode=mode):
            return

        # fix for fmt: on in children
        if children_contains_fmt_on(container, mode=mode):
            for index, child in enumerate(container.children):
                if isinstance(child, Leaf) and is_fmt_on(child, mode=mode):
                    if child.type in CLOSING_BRACKETS:
                        # This means `# fmt: on` is placed at a different bracket level
                        # than `# fmt: off`. This is an invalid use, but as a courtesy,
                        # we include this closing bracket in the ignored nodes.
                        # The alternative is to fail the formatting.
                        yield child
                    return
                if (
                    child.type == token.INDENT
                    and index < len(container.children) - 1
                    and children_contains_fmt_on(
                        container.children[index + 1], mode=mode
                    )
                ):
                    # This means `# fmt: on` is placed right after an indentation
                    # level, and we shouldn't swallow the previous INDENT token.
                    return
                if children_contains_fmt_on(child, mode=mode):
                    return
                yield child
        else:
            if container.type == token.DEDENT and container.next_sibling is None:
                # This can happen when there is no matching `# fmt: on` comment
                # at the same level as `# fmt: off`. We need to keep this DEDENT.
                return
            yield container
        container = container.next_sibling
+
+
def _find_compound_statement_context(parent: Node) -> Node | None:
    """Return the body node of a compound statement if we should respect fmt: skip.

    Handles one-line compound statements like ``if condition: body  # fmt: skip``
    both in their original one-line form and in the temporarily expanded form
    Black produces during its initial formatting pass:

        if condition:
            body  # fmt: skip

    In either case the returned node is the statement body — the simple_stmt
    itself, or the suite containing it.  Returns None if `parent` is not such
    a body.
    """
    if parent.type != syms.simple_stmt:
        return None

    enclosing = parent.parent
    if not isinstance(enclosing, Node):
        return None

    # Expanded form after Black's initial formatting pass:
    # compound_stmt -> suite -> simple_stmt.
    if enclosing.type == syms.suite:
        outer = enclosing.parent
        if isinstance(outer, Node) and outer.type in _COMPOUND_STATEMENTS:
            return enclosing

    # Original one-line form from the input source:
    # compound_stmt -> simple_stmt.
    if enclosing.type in _COMPOUND_STATEMENTS:
        return parent

    return None
+
+
def _should_keep_compound_statement_inline(
    body_node: Node, simple_stmt_parent: Node
) -> bool:
    """Check if a compound statement should be kept on one line.

    True only for compound statements with a semicolon-separated one-line
    body, e.g. ``if True: print("a"); print("b")  # fmt: skip``.
    """
    if not any(leaf.type == token.SEMI for leaf in body_node.leaves()):
        return False

    if body_node.type != syms.suite:
        # Original one-line form: the body IS the simple_stmt.
        return body_node is simple_stmt_parent

    # Expanded form: the suite must contain exactly one simple_stmt, and it
    # must be the one carrying the fmt: skip comment.
    stmts = [child for child in body_node.children if child.type == syms.simple_stmt]
    return len(stmts) == 1 and stmts[0] is simple_stmt_parent
+
+
def _get_compound_statement_header(
    body_node: Node, simple_stmt_parent: Node
) -> list[LN]:
    """Collect the header leaves of a compound statement preserved inline.

    Returns the leaves before `body_node` inside the compound statement
    (keyword, condition, colon, ...), or [] when the statement should not be
    kept on one line.
    """
    if not _should_keep_compound_statement_inline(body_node, simple_stmt_parent):
        return []

    # The compound statement is the parent of the body.
    compound = body_node.parent
    if compound is None or compound.type not in _COMPOUND_STATEMENTS:
        return []

    header: list[LN] = []
    for child in compound.children:
        if child is body_node:
            break
        if not isinstance(child, Leaf):
            header.extend(child.leaves())
        elif child.type not in (token.NEWLINE, token.INDENT):
            # NEWLINE/INDENT tokens are layout, not header content.
            header.append(child)
    return header
+
+
def _generate_ignored_nodes_from_fmt_skip(
    leaf: Leaf, comment: ProtoComment, mode: Mode
) -> Iterator[LN]:
    """Generate all leaves that should be ignored by the `# fmt: skip` from `leaf`.

    `leaf` carries the directive in its prefix; the yielded nodes are those
    on the same physical line as the directive, in source order.
    """
    prev_sibling = leaf.prev_sibling
    parent = leaf.parent
    ignored_nodes: list[LN] = []
    # Need to properly format the leaf prefix to compare it to comment.value,
    # which is also formatted
    comments = list_comments(leaf.prefix, is_endmarker=False, mode=mode)
    if not comments or comment.value != comments[0].value:
        return

    if Preview.fix_fmt_skip_in_one_liners in mode and not prev_sibling and parent:
        prev_sibling = parent.prev_sibling

    if prev_sibling is not None:
        leaf.prefix = leaf.prefix[comment.consumed :]

        if Preview.fix_fmt_skip_in_one_liners not in mode:
            # Stable (non-preview) behavior: walk left through siblings until
            # the start of the line and yield them in source order.
            siblings = [prev_sibling]
            while (
                "\n" not in prev_sibling.prefix
                and prev_sibling.prev_sibling is not None
            ):
                prev_sibling = prev_sibling.prev_sibling
                siblings.insert(0, prev_sibling)
            yield from siblings
            return

        # Generates the nodes to be ignored by `fmt: skip`.

        # Nodes to ignore are the ones on the same line as the
        # `# fmt: skip` comment, excluding the `# fmt: skip`
        # node itself.

        # Traversal process (starting at the `# fmt: skip` node):
        # 1. Move to the `prev_sibling` of the current node.
        # 2. If `prev_sibling` has children, go to its rightmost leaf.
        # 3. If there's no `prev_sibling`, move up to the parent
        #    node and repeat.
        # 4. Continue until:
        #    a. You encounter an `INDENT` or `NEWLINE` node (indicates
        #       start of the line).
        #    b. You reach the root node.

        # Include all visited LEAVES in the ignored list, except INDENT
        # or NEWLINE leaves.

        current_node = prev_sibling
        ignored_nodes = [current_node]
        if current_node.prev_sibling is None and current_node.parent is not None:
            current_node = current_node.parent

        # Track seen nodes to detect cycles that can occur after tree modifications
        seen_nodes = {id(current_node)}

        while "\n" not in current_node.prefix and current_node.prev_sibling is not None:
            leaf_nodes = list(current_node.prev_sibling.leaves())
            next_node = leaf_nodes[-1] if leaf_nodes else current_node

            # Detect infinite loop - if we've seen this node before, stop
            # This can happen when STANDALONE_COMMENT nodes are inserted
            # during processing
            if id(next_node) in seen_nodes:
                break

            current_node = next_node
            seen_nodes.add(id(current_node))

            # Stop if we encounter a STANDALONE_COMMENT created by fmt processing
            if (
                isinstance(current_node, Leaf)
                and current_node.type == STANDALONE_COMMENT
                and hasattr(current_node, "fmt_pass_converted_first_leaf")
            ):
                break

            if (
                current_node.type in CLOSING_BRACKETS
                and current_node.parent
                and current_node.parent.type == syms.atom
            ):
                # Treat a bracketed atom as one unit: step up to the atom.
                current_node = current_node.parent

            if current_node.type in (token.NEWLINE, token.INDENT):
                # Start of the line reached.
                current_node.prefix = ""
                break

            if current_node.type == token.DEDENT:
                break

            # Special case for with expressions
            # Without this, we can stuck inside the asexpr_test's children's children
            if (
                current_node.parent
                and current_node.parent.type == syms.asexpr_test
                and current_node.parent.parent
                and current_node.parent.parent.type == syms.with_stmt
            ):
                current_node = current_node.parent

            ignored_nodes.insert(0, current_node)

            if current_node.prev_sibling is None and current_node.parent is not None:
                current_node = current_node.parent

        # Special handling for compound statements with semicolon-separated bodies
        if Preview.fix_fmt_skip_in_one_liners in mode and isinstance(parent, Node):
            body_node = _find_compound_statement_context(parent)
            if body_node is not None:
                header_nodes = _get_compound_statement_header(body_node, parent)
                if header_nodes:
                    ignored_nodes = header_nodes + ignored_nodes

        yield from ignored_nodes
    elif (
        parent is not None and parent.type == syms.suite and leaf.type == token.NEWLINE
    ):
        # The `# fmt: skip` is on the colon line of the if/while/def/class/...
        # statements. The ignored nodes should be previous siblings of the
        # parent suite node.
        leaf.prefix = ""
        parent_sibling = parent.prev_sibling
        while parent_sibling is not None and parent_sibling.type != syms.suite:
            ignored_nodes.insert(0, parent_sibling)
            parent_sibling = parent_sibling.prev_sibling
        # Special case for `async_stmt` where the ASYNC token is on the
        # grandparent node.
        grandparent = parent.parent
        if (
            grandparent is not None
            and grandparent.prev_sibling is not None
            and grandparent.prev_sibling.type == token.ASYNC
        ):
            ignored_nodes.insert(0, grandparent.prev_sibling)
        yield from iter(ignored_nodes)
+
+
def is_fmt_on(container: LN, mode: Mode) -> bool:
    """Determine whether formatting is switched on within a container.

    The last `# fmt: on`/`# fmt: off` directive in the container's prefix
    wins; absent any directive, formatting is considered off.
    """
    # Scan from the last comment backwards: the first decisive directive we
    # meet is the one that was written last.
    for pc in reversed(list_comments(container.prefix, is_endmarker=False, mode=mode)):
        if contains_fmt_directive(pc.value, FMT_ON):
            return True
        if contains_fmt_directive(pc.value, FMT_OFF):
            return False
    return False
+
+
def children_contains_fmt_on(container: LN, mode: Mode) -> bool:
    """Determine if any direct child of `container` has formatting switched on."""
    return any(
        (first := first_leaf_of(child)) is not None and is_fmt_on(first, mode=mode)
        for child in container.children
    )
+
+
+def contains_pragma_comment(comment_list: list[Leaf]) -> bool:
+ """
+ Returns:
+ True iff one of the comments in @comment_list is a pragma used by one
+ of the more common static analysis tools for python (e.g. mypy, flake8,
+ pylint).
+ """
+ for comment in comment_list:
+ if comment.value.startswith(("# type:", "# noqa", "# pylint:")):
+ return True
+
+ return False
+
+
def contains_fmt_directive(
    comment_line: str, directives: set[str] = FMT_OFF | FMT_ON | FMT_SKIP
) -> bool:
    """
    Check whether `comment_line` contains a format directive, alone or paired
    with other comments.

    Defaults to matching all directives (skip, off, on, yapf) but can be
    narrowed to a specific subset.

    Matching styles:
        # foobar                     <-- single comment
        # foobar # foobar # foobar   <-- multiple comments
        # foobar; foobar             <-- list of comments (; separated)
    """
    candidates = [comment_line]
    # "# a # b" style: each "# "-separated chunk is a candidate.
    for chunk in comment_line.split(_COMMENT_PREFIX)[1:]:
        candidates.append(_COMMENT_PREFIX + chunk.strip())
    # "# a; b" style: each semicolon-separated entry is a candidate.
    for chunk in comment_line.strip(_COMMENT_PREFIX).split(_COMMENT_LIST_SEPARATOR):
        candidates.append(_COMMENT_PREFIX + chunk.strip())
    return any(candidate in directives for candidate in candidates)
diff --git a/py311/lib/python3.11/site-packages/black/concurrency.py b/py311/lib/python3.11/site-packages/black/concurrency.py
new file mode 100644
index 0000000000000000000000000000000000000000..e939dc34681a57cf180c69e8b39999a721da5442
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/concurrency.py
@@ -0,0 +1,204 @@
+"""
+Formatting many files at once via multiprocessing. Contains entrypoint and utilities.
+
+NOTE: this module is only imported if we need to format several files at once.
+"""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import os
+import signal
+import sys
+import traceback
+from collections.abc import Iterable
+from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor
+from multiprocessing import Manager
+from pathlib import Path
+from typing import Any
+
+from mypy_extensions import mypyc_attr
+
+from black import WriteBack, format_file_in_place
+from black.cache import Cache
+from black.mode import Mode
+from black.output import err
+from black.report import Changed, Report
+
+
def maybe_install_uvloop() -> None:
    """Install uvloop as the asyncio event loop implementation if available.

    Called only from command-line entry points so that using Black as a
    library never tampers with the parent process's event loop policy.
    """
    try:
        import uvloop

        uvloop.install()
    except ImportError:
        # uvloop is an optional accelerator; the stdlib loop works fine.
        pass
+
+
def cancel(tasks: Iterable[asyncio.Future[Any]]) -> None:
    """asyncio signal handler: report the abort on stderr and cancel all `tasks`."""
    err("Aborted!")
    for pending_task in tasks:
        pending_task.cancel()
+
+
def shutdown(loop: asyncio.AbstractEventLoop) -> None:
    """Cancel all pending tasks on `loop`, wait for them, and close the loop."""
    try:
        # This part is borrowed from asyncio/runners.py in Python 3.7b2.
        unfinished = [task for task in asyncio.all_tasks(loop) if not task.done()]
        if unfinished:
            for task in unfinished:
                task.cancel()
            loop.run_until_complete(
                asyncio.gather(*unfinished, return_exceptions=True)
            )
    finally:
        # `concurrent.futures.Future` objects cannot be cancelled once they
        # are already running. There might be some when the `shutdown()` happened.
        # Silence their logger's spew about the event loop being closed.
        logging.getLogger("concurrent.futures").setLevel(logging.CRITICAL)
        loop.close()
+
+
# diff-shades depends on being to monkeypatch this function to operate. I know it's
# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
@mypyc_attr(patchable=True)
def reformat_many(
    sources: set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: Report,
    workers: int | None,
    no_cache: bool = False,
) -> None:
    """Reformat multiple files using a ProcessPoolExecutor.

    `workers` defaults to the BLACK_NUM_WORKERS environment variable, then
    the CPU count (at least 1).  On platforms without working
    multiprocessing the function falls back to a single-worker
    ThreadPoolExecutor.  Results are reported through `report`; the on-disk
    cache is consulted and updated unless `no_cache` is set.
    """
    maybe_install_uvloop()

    if workers is None:
        workers = int(os.environ.get("BLACK_NUM_WORKERS", 0))
        workers = workers or os.cpu_count() or 1
    if sys.platform == "win32":
        # Work around https://bugs.python.org/issue26903
        workers = min(workers, 60)

    executor: Executor | None = None
    if workers > 1:
        try:
            executor = ProcessPoolExecutor(max_workers=workers)
        except (ImportError, NotImplementedError, OSError):
            # we arrive here if the underlying system does not support multi-processing
            # like in AWS Lambda or Termux, in which case we gracefully fallback to
            # a ThreadPoolExecutor with just a single worker (more workers would not do
            # us any good due to the Global Interpreter Lock)
            pass

    if executor is None:
        executor = ThreadPoolExecutor(max_workers=1)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(
            schedule_formatting(
                sources=sources,
                fast=fast,
                write_back=write_back,
                mode=mode,
                report=report,
                loop=loop,
                executor=executor,
                no_cache=no_cache,
            )
        )
    finally:
        # Tear down in order: drain/close the loop, detach it, then shut the
        # executor down even if loop shutdown raised.
        try:
            shutdown(loop)
        finally:
            asyncio.set_event_loop(None)
        if executor is not None:
            executor.shutdown()
+
+
async def schedule_formatting(
    sources: set[Path],
    fast: bool,
    write_back: WriteBack,
    mode: Mode,
    report: Report,
    loop: asyncio.AbstractEventLoop,
    executor: Executor,
    no_cache: bool = False,
) -> None:
    """Run formatting of `sources` in parallel using the provided `executor`.

    (Use ProcessPoolExecutors for actual parallelism.)

    `write_back`, `fast`, and `mode` options are passed to
    :func:`format_file_in_place`.  Files whose cached metadata is unchanged
    are reported as CACHED and skipped; files successfully written back or
    verified clean are recorded in the cache unless `no_cache` is set.
    """
    cache = None if no_cache else Cache.read(mode)
    if cache is not None and write_back not in (
        WriteBack.DIFF,
        WriteBack.COLOR_DIFF,
    ):
        sources, cached = cache.filtered_cached(sources)
        for src in sorted(cached):
            report.done(src, Changed.CACHED)
    if not sources:
        return

    cancelled = []
    sources_to_cache = []
    lock = None
    if write_back in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
        # For diff output, we need locks to ensure we don't interleave output
        # from different processes.
        manager = Manager()
        lock = manager.Lock()
    tasks = {
        asyncio.ensure_future(
            loop.run_in_executor(
                executor, format_file_in_place, src, fast, mode, write_back, lock
            )
        ): src
        for src in sorted(sources)
    }
    pending = tasks.keys()
    try:
        loop.add_signal_handler(signal.SIGINT, cancel, pending)
        loop.add_signal_handler(signal.SIGTERM, cancel, pending)
    except NotImplementedError:
        # There are no good alternatives for these on Windows.
        pass
    while pending:
        done, _ = await asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED)
        for task in done:
            src = tasks.pop(task)
            if task.cancelled():
                cancelled.append(task)
            elif exc := task.exception():
                if report.verbose:
                    traceback.print_exception(type(exc), exc, exc.__traceback__)
                report.failed(src, str(exc))
            else:
                changed = Changed.YES if task.result() else Changed.NO
                # If the file was written back or was successfully checked as
                # well-formatted, store this information in the cache.
                if write_back is WriteBack.YES or (
                    write_back is WriteBack.CHECK and changed is Changed.NO
                ):
                    sources_to_cache.append(src)
                report.done(src, changed)
    if cancelled:
        # Let cancellations settle before returning so the loop drains cleanly.
        await asyncio.gather(*cancelled, return_exceptions=True)
    if sources_to_cache and not no_cache and cache is not None:
        cache.write(sources_to_cache)
diff --git a/py311/lib/python3.11/site-packages/black/const.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/const.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..b1222881e6f95e3bb82f8e81a36bc36edb7c2f4d
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/const.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/const.py b/py311/lib/python3.11/site-packages/black/const.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee466679c7074caf936b1e2707b82468e8d83432
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/const.py
@@ -0,0 +1,4 @@
+# Default CLI values shared across Black's front end.
+DEFAULT_LINE_LENGTH = 88
+# Directories that should never be formatted: VCS metadata, tool caches,
+# virtualenvs and build output.
+DEFAULT_EXCLUDES = r"/(\.direnv|\.eggs|\.git|\.hg|\.ipynb_checkpoints|\.mypy_cache|\.nox|\.pytest_cache|\.ruff_cache|\.tox|\.svn|\.venv|\.vscode|__pypackages__|_build|buck-out|build|dist|venv)/" # noqa: B950
+# File extensions formatted by default: .py, .pyi stubs, and notebooks.
+DEFAULT_INCLUDES = r"(\.pyi?|\.ipynb)$"
+# Stand-in filename used for code received on stdin.
+STDIN_PLACEHOLDER = "__BLACK_STDIN_FILENAME__"
diff --git a/py311/lib/python3.11/site-packages/black/debug.py b/py311/lib/python3.11/site-packages/black/debug.py
new file mode 100644
index 0000000000000000000000000000000000000000..f051c497d0b601e89461332ad17661c643e181f2
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/debug.py
@@ -0,0 +1,55 @@
+from collections.abc import Iterator
+from dataclasses import dataclass, field
+from typing import Any, TypeVar, Union
+
+from black.nodes import Visitor
+from black.output import out
+from black.parsing import lib2to3_parse
+from blib2to3.pgen2 import token
+from blib2to3.pytree import Leaf, Node, type_repr
+
+LN = Union[Leaf, Node]
+T = TypeVar("T")
+
+
+@dataclass
+class DebugVisitor(Visitor[T]):
+    """Visitor that pretty-prints a lib2to3 tree, one node or leaf per line."""
+
+    # Current depth in the tree; drives two-space indentation per level.
+    tree_depth: int = 0
+    # Every emitted line is also collected here (useful for inspection).
+    list_output: list[str] = field(default_factory=list)
+    # When False, lines are only collected, not echoed through `out()`.
+    print_output: bool = True
+
+    def out(self, message: str, *args: Any, **kwargs: Any) -> None:
+        # Record the line, then optionally forward it to black.output.out.
+        self.list_output.append(message)
+        if self.print_output:
+            out(message, *args, **kwargs)
+
+    def visit_default(self, node: LN) -> Iterator[T]:
+        """Print `node`; internal nodes get an open/close pair around children."""
+        indent = " " * (2 * self.tree_depth)
+        if isinstance(node, Node):
+            _type = type_repr(node.type)
+            self.out(f"{indent}{_type}", fg="yellow")
+            self.tree_depth += 1
+            for child in node.children:
+                yield from self.visit(child)
+
+            self.tree_depth -= 1
+            self.out(f"{indent}/{_type}", fg="yellow", bold=False)
+        else:
+            _type = token.tok_name.get(node.type, str(node.type))
+            self.out(f"{indent}{_type}", fg="blue", nl=False)
+            if node.prefix:
+                # We don't have to handle prefixes for `Node` objects since
+                # that delegates to the first child anyway.
+                self.out(f" {node.prefix!r}", fg="green", bold=False, nl=False)
+            self.out(f" {node.value!r}", fg="blue", bold=False)
+
+    @classmethod
+    def show(cls, code: str | Leaf | Node) -> None:
+        """Pretty-print the lib2to3 AST of a given string of `code`.
+
+        Convenience method for debugging.
+        """
+        v: DebugVisitor[None] = DebugVisitor()
+        if isinstance(code, str):
+            code = lib2to3_parse(code)
+        list(v.visit(code))
diff --git a/py311/lib/python3.11/site-packages/black/files.py b/py311/lib/python3.11/site-packages/black/files.py
new file mode 100644
index 0000000000000000000000000000000000000000..21ad7bc2ae63a5a721d614158e1b8b0a34d27e87
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/files.py
@@ -0,0 +1,426 @@
+import io
+import os
+import sys
+from collections.abc import Iterable, Iterator, Sequence
+from functools import lru_cache
+from pathlib import Path
+from re import Pattern
+from typing import TYPE_CHECKING, Any, Union
+
+from mypy_extensions import mypyc_attr
+from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet
+from packaging.version import InvalidVersion, Version
+from pathspec import PathSpec
+from pathspec.patterns.gitwildmatch import GitWildMatchPatternError
+
+if sys.version_info >= (3, 11):
+ try:
+ import tomllib
+ except ImportError:
+ # Help users on older alphas
+ if not TYPE_CHECKING:
+ import tomli as tomllib
+else:
+ import tomli as tomllib
+
+from black.handle_ipynb_magics import jupyter_dependencies_are_installed
+from black.mode import TargetVersion
+from black.output import err
+from black.report import Report
+
+if TYPE_CHECKING:
+ import colorama
+
+
+@lru_cache
+def _load_toml(path: Path | str) -> dict[str, Any]:
+    """Read and parse a TOML file, caching the parsed result per path."""
+    with open(path, "rb") as f:
+        return tomllib.load(f)
+
+
+@lru_cache
+def _cached_resolve(path: Path) -> Path:
+    """`Path.resolve()` with caching — resolution hits the filesystem."""
+    return path.resolve()
+
+
+@lru_cache
+def find_project_root(
+    srcs: Sequence[str], stdin_filename: str | None = None
+) -> tuple[Path, str]:
+    """Return a directory containing .git, .hg, or pyproject.toml.
+
+    pyproject.toml files are only considered if they contain a [tool.black]
+    section and are ignored otherwise.
+
+    That directory will be a common parent of all files and directories
+    passed in `srcs`.
+
+    If no directory in the tree contains a marker that would specify it's the
+    project root, the root of the file system is returned.
+
+    Returns a two-tuple with the first element as the project root path and
+    the second element as a string describing the method by which the
+    project root was discovered.
+    """
+    # "-" stands for stdin on the command line; substitute its real filename.
+    if stdin_filename is not None:
+        srcs = tuple(stdin_filename if s == "-" else s for s in srcs)
+    if not srcs:
+        srcs = [str(_cached_resolve(Path.cwd()))]
+
+    path_srcs = [_cached_resolve(Path(Path.cwd(), src)) for src in srcs]
+
+    # A list of lists of parents for each 'src'. 'src' is included as a
+    # "parent" of itself if it is a directory
+    src_parents = [
+        list(path.parents) + ([path] if path.is_dir() else []) for path in path_srcs
+    ]
+
+    # Deepest directory that is an ancestor of every source path.
+    common_base = max(
+        set.intersection(*(set(parents) for parents in src_parents)),
+        key=lambda path: path.parts,
+    )
+
+    # Walk upwards until a VCS marker or a [tool.black] pyproject.toml is found.
+    for directory in (common_base, *common_base.parents):
+        if (directory / ".git").exists():
+            return directory, ".git directory"
+
+        if (directory / ".hg").is_dir():
+            return directory, ".hg directory"
+
+        if (directory / "pyproject.toml").is_file():
+            pyproject_toml = _load_toml(directory / "pyproject.toml")
+            if "black" in pyproject_toml.get("tool", {}):
+                return directory, "pyproject.toml"
+
+    # Loop exhausted without a marker: `directory` is the filesystem root.
+    return directory, "file system root"
+
+
def find_pyproject_toml(
    path_search_start: tuple[str, ...], stdin_filename: str | None = None
) -> str | None:
    """Find the absolute filepath to a pyproject.toml if it exists"""
    project_root, _ = find_project_root(path_search_start, stdin_filename)
    candidate = project_root / "pyproject.toml"
    if candidate.is_file():
        return str(candidate)

    # No project-level config; fall back to the user-level one, if readable.
    try:
        user_candidate = find_user_pyproject_toml()
        return str(user_candidate) if user_candidate.is_file() else None
    except (PermissionError, RuntimeError) as e:
        # We do not have access to the user-level config directory, so ignore it.
        err(f"Ignoring user configuration directory due to {e!r}")
        return None
+
+
+@mypyc_attr(patchable=True)
+def parse_pyproject_toml(path_config: str) -> dict[str, Any]:
+    """Parse a pyproject toml file, pulling out relevant parts for Black.
+
+    If parsing fails, will raise a tomllib.TOMLDecodeError.
+    """
+    pyproject_toml = _load_toml(path_config)
+    config: dict[str, Any] = pyproject_toml.get("tool", {}).get("black", {})
+    # Normalize keys: drop leading "--" and turn dashes into underscores.
+    config = {k.replace("--", "").replace("-", "_"): v for k, v in config.items()}
+
+    if "target_version" not in config:
+        # Fall back to the version range implied by project.requires-python.
+        inferred_target_version = infer_target_version(pyproject_toml)
+        if inferred_target_version is not None:
+            config["target_version"] = [v.name.lower() for v in inferred_target_version]
+
+    return config
+
+
def infer_target_version(
    pyproject_toml: dict[str, Any],
) -> list[TargetVersion] | None:
    """Infer Black's target version from the project metadata in pyproject.toml.

    Supports the PyPA standard format (PEP 621):
    https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#requires-python

    If the target version cannot be inferred, returns None.
    """
    requires_python = pyproject_toml.get("project", {}).get("requires-python")
    if requires_python is None:
        return None

    # Try a bare version ("3.10") first, then a specifier set (">=3.8,<3.12").
    try:
        return parse_req_python_version(requires_python)
    except InvalidVersion:
        pass
    try:
        return parse_req_python_specifier(requires_python)
    except (InvalidSpecifier, InvalidVersion):
        pass
    return None
+
+
def parse_req_python_version(requires_python: str) -> list[TargetVersion] | None:
    """Parse a version string (i.e. ``"3.7"``) to a list of TargetVersion.

    If parsing fails, will raise a packaging.version.InvalidVersion error.
    If the parsed version cannot be mapped to a valid TargetVersion, returns None.
    """
    parsed = Version(requires_python)
    major, *rest = parsed.release
    # Only 3.x versions with an explicit minor can map to a TargetVersion.
    if major != 3 or not rest:
        return None
    try:
        return [TargetVersion(rest[0])]
    except ValueError:
        # Minor version outside the range Black knows about.
        return None
+
+
+def parse_req_python_specifier(requires_python: str) -> list[TargetVersion] | None:
+    """Parse a specifier string (i.e. ``">=3.7,<3.10"``) to a list of TargetVersion.
+
+    If parsing fails, will raise a packaging.specifiers.InvalidSpecifier error.
+    If the parsed specifier cannot be mapped to a valid TargetVersion, returns None.
+    """
+    specifier_set = strip_specifier_set(SpecifierSet(requires_python))
+    if not specifier_set:
+        return None
+
+    # Candidates are every "3.X" Black can target (TargetVersion values are
+    # minor versions); keep those the specifier set accepts.
+    target_version_map = {f"3.{v.value}": v for v in TargetVersion}
+    compatible_versions: list[str] = list(specifier_set.filter(target_version_map))
+    if compatible_versions:
+        return [target_version_map[v] for v in compatible_versions]
+    return None
+
+
+def strip_specifier_set(specifier_set: SpecifierSet) -> SpecifierSet:
+    """Strip minor versions for some specifiers in the specifier set.
+
+    For background on version specifiers, see PEP 440:
+    https://peps.python.org/pep-0440/#version-specifiers
+    """
+    specifiers = []
+    for s in specifier_set:
+        if "*" in str(s):
+            # Wildcard specifiers (e.g. ``==3.10.*``) must keep their full form.
+            specifiers.append(s)
+        elif s.operator in ["~=", "==", ">=", "==="]:
+            # For these operators the micro version does not change which
+            # minor versions are allowed, so reduce to "major.minor".
+            version = Version(s.version)
+            stripped = Specifier(f"{s.operator}{version.major}.{version.minor}")
+            specifiers.append(stripped)
+        elif s.operator == ">":
+            version = Version(s.version)
+            if len(version.release) > 2:
+                # ">3.8.1" still admits later 3.8.x releases, so relax it to
+                # ">=3.8"; a bare ">3.8" is left untouched.
+                s = Specifier(f">={version.major}.{version.minor}")
+            specifiers.append(s)
+        else:
+            specifiers.append(s)
+
+    return SpecifierSet(",".join(str(s) for s in specifiers))
+
+
+@lru_cache
+def find_user_pyproject_toml() -> Path:
+    r"""Return the path to the top-level user configuration for black.
+
+    This looks for ~\.black on Windows and ~/.config/black on Linux and other
+    Unix systems.
+
+    May raise:
+    - RuntimeError: if the current user has no homedir
+    - PermissionError: if the current process cannot access the user's homedir
+    """
+    if sys.platform == "win32":
+        # Windows
+        user_config_path = Path.home() / ".black"
+    else:
+        # Honor the XDG base-directory override when set.
+        config_root = os.environ.get("XDG_CONFIG_HOME", "~/.config")
+        user_config_path = Path(config_root).expanduser() / "black"
+    return _cached_resolve(user_config_path)
+
+
+@lru_cache
+def get_gitignore(root: Path) -> PathSpec:
+    """Return a PathSpec matching gitignore content if present.
+
+    A missing .gitignore yields an empty (match-nothing) PathSpec.
+    """
+    gitignore = root / ".gitignore"
+    lines: list[str] = []
+    if gitignore.is_file():
+        with gitignore.open(encoding="utf-8") as gf:
+            lines = gf.readlines()
+    try:
+        return PathSpec.from_lines("gitwildmatch", lines)
+    except GitWildMatchPatternError as e:
+        # Tell the user which .gitignore is malformed before propagating.
+        err(f"Could not parse {gitignore}: {e}")
+        raise
+
+
+def resolves_outside_root_or_cannot_stat(
+    path: Path,
+    root: Path,
+    report: Report | None = None,
+) -> bool:
+    """
+    Returns whether the path is a symbolic link that points outside the
+    root directory. Also returns True if we failed to resolve the path.
+    """
+    try:
+        resolved_path = _cached_resolve(path)
+    except OSError as e:
+        # Could not stat/resolve — e.g. broken symlink or permission problem.
+        if report:
+            report.path_ignored(path, f"cannot be read because {e}")
+        return True
+    try:
+        resolved_path.relative_to(root)
+    except ValueError:
+        # The resolved location is not under `root`: the path escapes it.
+        if report:
+            report.path_ignored(path, f"is a symbolic link that points outside {root}")
+        return True
+    return False
+
+
+def best_effort_relative_path(path: Path, root: Path) -> Path:
+    """Return `path` relative to `root`, preferring the unresolved spelling so
+    symlinked components are preserved when possible."""
+    # Precondition: resolves_outside_root_or_cannot_stat(path, root) is False
+    try:
+        return path.absolute().relative_to(root)
+    except ValueError:
+        pass
+    # Find the nearest ancestor that resolves to `root` and relativize to it.
+    root_parent = next((p for p in path.parents if _cached_resolve(p) == root), None)
+    if root_parent is not None:
+        return path.relative_to(root_parent)
+    # something adversarial, fallback to path guaranteed by precondition
+    return _cached_resolve(path).relative_to(root)
+
+
+def _path_is_ignored(
+ root_relative_path: str,
+ root: Path,
+ gitignore_dict: dict[Path, PathSpec],
+) -> bool:
+ path = root / root_relative_path
+ # Note that this logic is sensitive to the ordering of gitignore_dict. Callers must
+ # ensure that gitignore_dict is ordered from least specific to most specific.
+ for gitignore_path, pattern in gitignore_dict.items():
+ try:
+ relative_path = path.relative_to(gitignore_path).as_posix()
+ if path.is_dir():
+ relative_path = relative_path + "/"
+ except ValueError:
+ break
+ if pattern.match_file(relative_path):
+ return True
+ return False
+
+
+def path_is_excluded(
+ normalized_path: str,
+ pattern: Pattern[str] | None,
+) -> bool:
+ match = pattern.search(normalized_path) if pattern else None
+ return bool(match and match.group(0))
+
+
+def gen_python_files(
+    paths: Iterable[Path],
+    root: Path,
+    include: Pattern[str],
+    exclude: Pattern[str],
+    extend_exclude: Pattern[str] | None,
+    force_exclude: Pattern[str] | None,
+    report: Report,
+    gitignore_dict: dict[Path, PathSpec] | None,
+    *,
+    verbose: bool,
+    quiet: bool,
+) -> Iterator[Path]:
+    """Generate all files under `path` whose paths are not excluded by the
+    `exclude_regex`, `extend_exclude`, or `force_exclude` regexes,
+    but are included by the `include` regex.
+
+    Symbolic links pointing outside of the `root` directory are ignored.
+
+    `report` is where output about exclusions goes.
+    """
+
+    assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
+    for child in paths:
+        assert child.is_absolute()
+        root_relative_path = child.relative_to(root).as_posix()
+
+        # First ignore files matching .gitignore, if passed
+        if gitignore_dict and _path_is_ignored(
+            root_relative_path, root, gitignore_dict
+        ):
+            report.path_ignored(child, "matches a .gitignore file content")
+            continue
+
+        # Then ignore with `--exclude` `--extend-exclude` and `--force-exclude` options.
+        # The exclude regexes match against "/"-prefixed, "/"-suffixed-for-dirs
+        # paths relative to the root.
+        root_relative_path = "/" + root_relative_path
+        if child.is_dir():
+            root_relative_path += "/"
+
+        if path_is_excluded(root_relative_path, exclude):
+            report.path_ignored(child, "matches the --exclude regular expression")
+            continue
+
+        if path_is_excluded(root_relative_path, extend_exclude):
+            report.path_ignored(
+                child, "matches the --extend-exclude regular expression"
+            )
+            continue
+
+        if path_is_excluded(root_relative_path, force_exclude):
+            report.path_ignored(child, "matches the --force-exclude regular expression")
+            continue
+
+        # Skip symlinks escaping the root and paths we cannot stat.
+        if resolves_outside_root_or_cannot_stat(child, root, report):
+            continue
+
+        if child.is_dir():
+            # If gitignore is None, gitignore usage is disabled, while a Falsey
+            # gitignore is when the directory doesn't have a .gitignore file.
+            if gitignore_dict is not None:
+                new_gitignore_dict = {
+                    **gitignore_dict,
+                    root / child: get_gitignore(child),
+                }
+            else:
+                new_gitignore_dict = None
+            # Recurse with the same filters, plus this directory's .gitignore.
+            yield from gen_python_files(
+                child.iterdir(),
+                root,
+                include,
+                exclude,
+                extend_exclude,
+                force_exclude,
+                report,
+                new_gitignore_dict,
+                verbose=verbose,
+                quiet=quiet,
+            )
+
+        elif child.is_file():
+            if child.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
+                warn=verbose or not quiet
+            ):
+                continue
+            # `include` is matched against the same "/"-prefixed relative path.
+            include_match = include.search(root_relative_path) if include else True
+            if include_match:
+                yield child
+
+
+def wrap_stream_for_windows(
+    f: io.TextIOWrapper,
+) -> Union[io.TextIOWrapper, "colorama.AnsiToWin32"]:
+    """
+    Wrap stream with colorama's wrap_stream so colors are shown on Windows.
+
+    If `colorama` is unavailable, the original stream is returned unmodified.
+    Otherwise, the `wrap_stream()` function determines whether the stream needs
+    to be wrapped for a Windows environment and will accordingly either return
+    an `AnsiToWin32` wrapper or the original stream.
+    """
+    try:
+        from colorama.initialise import wrap_stream
+    except ImportError:
+        # colorama is an optional dependency; without it, colors pass through.
+        return f
+    else:
+        # Set `strip=False` to avoid needing to modify test_express_diff_with_color.
+        return wrap_stream(f, convert=None, strip=False, autoreset=False, wrap=True)
diff --git a/py311/lib/python3.11/site-packages/black/handle_ipynb_magics.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/handle_ipynb_magics.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..431014cdf92010fad372d6db94108002718fadd8
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/handle_ipynb_magics.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/handle_ipynb_magics.py b/py311/lib/python3.11/site-packages/black/handle_ipynb_magics.py
new file mode 100644
index 0000000000000000000000000000000000000000..c84fe6219fba92603d2264070040cd60e5e0e97a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/handle_ipynb_magics.py
@@ -0,0 +1,502 @@
+"""Functions to process IPython magics with."""
+
+import ast
+import collections
+import dataclasses
+import re
+import secrets
+from functools import lru_cache
+from importlib.util import find_spec
+from typing import TypeGuard
+
+from black.mode import Mode
+from black.output import out
+from black.report import NothingChanged
+
+# Calls that IPython's TransformerManager may already have produced; cells
+# containing these are skipped because they cannot be round-tripped safely
+# (see validate_cell).
+TRANSFORMED_MAGICS = frozenset((
+    "get_ipython().run_cell_magic",
+    "get_ipython().system",
+    "get_ipython().getoutput",
+    "get_ipython().run_line_magic",
+))
+# tokenize-rt token names that carry no code; skipped when scanning for a
+# trailing semicolon.
+TOKENS_TO_IGNORE = frozenset((
+    "ENDMARKER",
+    "NL",
+    "NEWLINE",
+    "COMMENT",
+    "DEDENT",
+    "UNIMPORTANT_WS",
+    "ESCAPED_NL",
+))
+# Cell magics whose bodies are Python and therefore safe to format.
+PYTHON_CELL_MAGICS = frozenset((
+    "capture",
+    "prun",
+    "pypy",
+    "python",
+    "python3",
+    "time",
+    "timeit",
+))
+
+
+@dataclasses.dataclass(frozen=True)
+class Replacement:
+    """A masked IPython magic: `mask` stands in for the original `src`."""
+
+    # Random token substituted into the cell while Black formats it.
+    mask: str
+    # Original magic source to restore afterwards (see unmask_cell).
+    src: str
+
+
+@lru_cache
+def jupyter_dependencies_are_installed(*, warn: bool) -> bool:
+    """Return True if tokenize-rt and IPython are importable; warn once if not."""
+    # find_spec() checks importability without actually importing the modules.
+    installed = (
+        find_spec("tokenize_rt") is not None and find_spec("IPython") is not None
+    )
+    if not installed and warn:
+        msg = (
+            "Skipping .ipynb files as Jupyter dependencies are not installed.\n"
+            'You can fix this by running ``pip install "black[jupyter]"``'
+        )
+        out(msg)
+    return installed
+
+
+def validate_cell(src: str, mode: Mode) -> None:
+    r"""Check that cell does not already contain TransformerManager transformations,
+    or non-Python cell magics, which might cause tokenizer_rt to break because of
+    indentations.
+
+    If a cell contains ``!ls``, then it'll be transformed to
+    ``get_ipython().system('ls')``. However, if the cell originally contained
+    ``get_ipython().system('ls')``, then it would get transformed in the same way:
+
+        >>> TransformerManager().transform_cell("get_ipython().system('ls')")
+        "get_ipython().system('ls')\n"
+        >>> TransformerManager().transform_cell("!ls")
+        "get_ipython().system('ls')\n"
+
+    Due to the impossibility of safely roundtripping in such situations, cells
+    containing transformed magics will be ignored.
+    """
+    if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
+        raise NothingChanged
+
+    line = _get_code_start(src)
+    # Cell magics not known (or configured) to contain Python are left untouched.
+    if line.startswith("%%") and (
+        line.split(maxsplit=1)[0][2:]
+        not in PYTHON_CELL_MAGICS | mode.python_cell_magics
+    ):
+        raise NothingChanged
+
+
+def remove_trailing_semicolon(src: str) -> tuple[str, bool]:
+    """Remove trailing semicolon from Jupyter notebook cell.
+
+    For example,
+
+        fig, ax = plt.subplots()
+        ax.plot(x_data, y_data);  # plot data
+
+    would become
+
+        fig, ax = plt.subplots()
+        ax.plot(x_data, y_data)  # plot data
+
+    Mirrors the logic in `quiet` from `IPython.core.displayhook`, but uses
+    ``tokenize_rt`` so that round-tripping works fine.
+    """
+    from tokenize_rt import reversed_enumerate, src_to_tokens, tokens_to_src
+
+    tokens = src_to_tokens(src)
+    trailing_semicolon = False
+    # Walk tokens from the end, skipping trivia, to find the last real token.
+    for idx, token in reversed_enumerate(tokens):
+        if token.name in TOKENS_TO_IGNORE:
+            continue
+        if token.name == "OP" and token.src == ";":
+            del tokens[idx]
+            trailing_semicolon = True
+        break
+    if not trailing_semicolon:
+        return src, False
+    return tokens_to_src(tokens), True
+
+
+def put_trailing_semicolon_back(src: str, has_trailing_semicolon: bool) -> str:
+    """Put trailing semicolon back if cell originally had it.
+
+    Mirrors the logic in `quiet` from `IPython.core.displayhook`, but uses
+    ``tokenize_rt`` so that round-tripping works fine.
+    """
+    if not has_trailing_semicolon:
+        return src
+    from tokenize_rt import reversed_enumerate, src_to_tokens, tokens_to_src
+
+    tokens = src_to_tokens(src)
+    # Re-append ";" to the last non-trivia token of the formatted source.
+    for idx, token in reversed_enumerate(tokens):
+        if token.name in TOKENS_TO_IGNORE:
+            continue
+        tokens[idx] = token._replace(src=token.src + ";")
+        break
+    else:  # pragma: nocover
+        raise AssertionError(
+            "INTERNAL ERROR: Was not able to reinstate trailing semicolon. "
+            "Please report a bug on https://github.com/psf/black/issues.  "
+        ) from None
+    return str(tokens_to_src(tokens))
+
+
+def mask_cell(src: str) -> tuple[str, list[Replacement]]:
+    """Mask IPython magics so content becomes parseable Python code.
+
+    For example,
+
+        %matplotlib inline
+        'foo'
+
+    becomes
+
+        b"25716f358c32750"
+        'foo'
+
+    The replacements are returned, along with the transformed code.
+    """
+    replacements: list[Replacement] = []
+    try:
+        ast.parse(src)
+    except SyntaxError:
+        # Might have IPython magics, will process below.
+        pass
+    else:
+        # Syntax is fine, nothing to mask, early return.
+        return src, replacements
+
+    from IPython.core.inputtransformer2 import TransformerManager
+
+    transformer_manager = TransformerManager()
+    # A side effect of the following transformation is that it also removes any
+    # empty lines at the beginning of the cell.
+    transformed = transformer_manager.transform_cell(src)
+    transformed, cell_magic_replacements = replace_cell_magics(transformed)
+    replacements += cell_magic_replacements
+    # NOTE(review): transform_cell runs a second time on the extracted
+    # cell-magic body; presumably to expand magics nested inside it — confirm.
+    transformed = transformer_manager.transform_cell(transformed)
+    transformed, magic_replacements = replace_magics(transformed)
+    if len(transformed.strip().splitlines()) != len(src.strip().splitlines()):
+        # Multi-line magic, not supported.
+        raise NothingChanged
+    replacements += magic_replacements
+    return transformed, replacements
+
+
def create_token(n_chars: int) -> str:
    """Create a randomly generated token that is n_chars characters long."""
    assert n_chars > 0
    hex_bytes = max(n_chars // 2 - 1, 1)
    hex_digits = secrets.token_hex(hex_bytes)
    # Drop one hex digit when the quoted form would overshoot the target width.
    if len(hex_digits) + 3 > n_chars:
        hex_digits = hex_digits[:-1]
    # We use a bytestring so that the string does not get interpreted
    # as a docstring.
    return 'b"' + hex_digits + '"'
+
+
+def get_token(src: str, magic: str) -> str:
+    """Return randomly generated token to mask IPython magic with.
+
+    For example, if 'magic' was `%matplotlib inline`, then a possible
+    token to mask it with would be `"43fdd17f7e5ddc83"`. The token
+    will be the same length as the magic, and we make sure that it was
+    not already present anywhere else in the cell.
+    """
+    assert magic
+    n_chars = len(magic)
+    token = create_token(n_chars)
+    counter = 0
+    # Regenerate on collision; bail out after 100 tries rather than loop forever.
+    while token in src:
+        token = create_token(n_chars)
+        counter += 1
+        if counter > 100:
+            raise AssertionError(
+                "INTERNAL ERROR: Black was not able to replace IPython magic. "
+                "Please report a bug on https://github.com/psf/black/issues.  "
+                f"The magic might be helpful: {magic}"
+            ) from None
+    return token
+
+
+def replace_cell_magics(src: str) -> tuple[str, list[Replacement]]:
+    r"""Replace cell magic with token.
+
+    Note that 'src' will already have been processed by IPython's
+    TransformerManager().transform_cell.
+
+    Example,
+
+        get_ipython().run_cell_magic('t', '-n1', 'ls =!ls\n')
+
+    becomes
+
+        "a794."
+        ls =!ls
+
+    The replacement, along with the transformed code, is returned.
+    """
+    replacements: list[Replacement] = []
+
+    tree = ast.parse(src)
+
+    cell_magic_finder = CellMagicFinder()
+    cell_magic_finder.visit(tree)
+    # At most one cell magic can head a cell; absence means nothing to replace.
+    if cell_magic_finder.cell_magic is None:
+        return src, replacements
+    header = cell_magic_finder.cell_magic.header
+    mask = get_token(src, header)
+    replacements.append(Replacement(mask=mask, src=header))
+    return f"{mask}\n{cell_magic_finder.cell_magic.body}", replacements
+
+
+def replace_magics(src: str) -> tuple[str, list[Replacement]]:
+    """Replace magics within body of cell.
+
+    Note that 'src' will already have been processed by IPython's
+    TransformerManager().transform_cell.
+
+    Example, this
+
+        get_ipython().run_line_magic('matplotlib', 'inline')
+        'foo'
+
+    becomes
+
+        "5e67db56d490fd39"
+        'foo'
+
+    The replacement, along with the transformed code, are returned.
+    """
+    replacements = []
+    magic_finder = MagicFinder()
+    magic_finder.visit(ast.parse(src))
+    new_srcs = []
+    # AST line numbers are 1-based, hence enumerate(..., start=1).
+    for i, line in enumerate(src.split("\n"), start=1):
+        if i in magic_finder.magics:
+            offsets_and_magics = magic_finder.magics[i]
+            if len(offsets_and_magics) != 1:  # pragma: nocover
+                raise AssertionError(
+                    f"Expecting one magic per line, got: {offsets_and_magics}\n"
+                    "Please report a bug on https://github.com/psf/black/issues."
+                )
+            col_offset, magic = (
+                offsets_and_magics[0].col_offset,
+                offsets_and_magics[0].magic,
+            )
+            mask = get_token(src, magic)
+            replacements.append(Replacement(mask=mask, src=magic))
+            # Everything from the magic's column onward is replaced by the mask.
+            line = line[:col_offset] + mask
+        new_srcs.append(line)
+    return "\n".join(new_srcs), replacements
+
+
def unmask_cell(src: str, replacements: list[Replacement]) -> str:
    """Restore the original magics that were masked out of the cell.

    Each mask token is substituted back with the magic source it replaced,
    e.g. a ``"9b20"`` placeholder becomes ``%%time`` again.
    """
    restored = src
    for item in replacements:
        restored = restored.replace(item.mask, item.src)
    return restored
+
+
+def _get_code_start(src: str) -> str:
+ """Provides the first line where the code starts.
+
+ Iterates over lines of code until it finds the first line that doesn't
+ contain only empty spaces and comments. It removes any empty spaces at the
+ start of the line and returns it. If such line doesn't exist, it returns an
+ empty string.
+ """
+ for match in re.finditer(".+", src):
+ line = match.group(0).lstrip()
+ if line and not line.startswith("#"):
+ return line
+ return ""
+
+
+def _is_ipython_magic(node: ast.expr) -> TypeGuard[ast.Attribute]:
+ """Check if attribute is IPython magic.
+
+ Note that the source of the abstract syntax tree
+ will already have been processed by IPython's
+ TransformerManager().transform_cell.
+ """
+ return (
+ isinstance(node, ast.Attribute)
+ and isinstance(node.value, ast.Call)
+ and isinstance(node.value.func, ast.Name)
+ and node.value.func.id == "get_ipython"
+ )
+
+
+def _get_str_args(args: list[ast.expr]) -> list[str]:
+ str_args = []
+ for arg in args:
+ assert isinstance(arg, ast.Constant) and isinstance(arg.value, str)
+ str_args.append(arg.value)
+ return str_args
+
+
+@dataclasses.dataclass(frozen=True)
+class CellMagic:
+ name: str
+ params: str | None
+ body: str
+
+ @property
+ def header(self) -> str:
+ if self.params:
+ return f"%%{self.name} {self.params}"
+ return f"%%{self.name}"
+
+
+# ast.NodeVisitor + dataclass = breakage under mypyc.
+class CellMagicFinder(ast.NodeVisitor):
+    r"""Find cell magics.
+
+    Note that the source of the abstract syntax tree
+    will already have been processed by IPython's
+    TransformerManager().transform_cell.
+
+    For example,
+
+        %%time\n
+        foo()
+
+    would have been transformed to
+
+        get_ipython().run_cell_magic('time', '', 'foo()\n')
+
+    and we look for instances of the latter.
+    """
+
+    def __init__(self, cell_magic: CellMagic | None = None) -> None:
+        self.cell_magic = cell_magic
+
+    def visit_Expr(self, node: ast.Expr) -> None:
+        """Find cell magic, extract header and body."""
+        if (
+            isinstance(node.value, ast.Call)
+            and _is_ipython_magic(node.value.func)
+            and node.value.func.attr == "run_cell_magic"
+        ):
+            # Positional args are (name, params, body), all string literals.
+            args = _get_str_args(node.value.args)
+            self.cell_magic = CellMagic(name=args[0], params=args[1], body=args[2])
+        self.generic_visit(node)
+
+
+@dataclasses.dataclass(frozen=True)
+class OffsetAndMagic:
+    """Column offset and reconstructed source of one magic on a line."""
+
+    # 0-based column where the magic call starts in the transformed source.
+    col_offset: int
+    # The magic rewritten back to its original `%`/`!`/`?` spelling.
+    magic: str
+
+
+# Unsurprisingly, subclassing ast.NodeVisitor means we can't use dataclasses here
+# as mypyc will generate broken code.
+class MagicFinder(ast.NodeVisitor):
+    """Visit cell to look for get_ipython calls.
+
+    Note that the source of the abstract syntax tree
+    will already have been processed by IPython's
+    TransformerManager().transform_cell.
+
+    For example,
+
+        %matplotlib inline
+
+    would have been transformed to
+
+        get_ipython().run_line_magic('matplotlib', 'inline')
+
+    and we look for instances of the latter (and likewise for other
+    types of magics).
+    """
+
+    def __init__(self) -> None:
+        # Maps 1-based line numbers to the magics found on that line.
+        self.magics: dict[int, list[OffsetAndMagic]] = collections.defaultdict(list)
+
+    def visit_Assign(self, node: ast.Assign) -> None:
+        """Look for system assign magics.
+
+        For example,
+
+            black_version = !black --version
+            env = %env var
+
+        would have been (respectively) transformed to
+
+            black_version = get_ipython().getoutput('black --version')
+            env = get_ipython().run_line_magic('env', 'var')
+
+        and we look for instances of any of the latter.
+        """
+        if isinstance(node.value, ast.Call) and _is_ipython_magic(node.value.func):
+            args = _get_str_args(node.value.args)
+            # `src` is rebuilt in the cell's original syntax so it can be
+            # swapped back in after formatting.
+            if node.value.func.attr == "getoutput":
+                src = f"!{args[0]}"
+            elif node.value.func.attr == "run_line_magic":
+                src = f"%{args[0]}"
+                if args[1]:
+                    src += f" {args[1]}"
+            else:
+                raise AssertionError(
+                    f"Unexpected IPython magic {node.value.func.attr!r} found. "
+                    "Please report a bug on https://github.com/psf/black/issues."
+                ) from None
+            self.magics[node.value.lineno].append(
+                OffsetAndMagic(node.value.col_offset, src)
+            )
+        self.generic_visit(node)
+
+    def visit_Expr(self, node: ast.Expr) -> None:
+        """Look for magics in body of cell.
+
+        For examples,
+
+            !ls
+            !!ls
+            ?ls
+            ??ls
+
+        would (respectively) get transformed to
+
+            get_ipython().system('ls')
+            get_ipython().getoutput('ls')
+            get_ipython().run_line_magic('pinfo', 'ls')
+            get_ipython().run_line_magic('pinfo2', 'ls')
+
+        and we look for instances of any of the latter.
+        """
+        if isinstance(node.value, ast.Call) and _is_ipython_magic(node.value.func):
+            args = _get_str_args(node.value.args)
+            if node.value.func.attr == "run_line_magic":
+                if args[0] == "pinfo":
+                    src = f"?{args[1]}"
+                elif args[0] == "pinfo2":
+                    src = f"??{args[1]}"
+                else:
+                    src = f"%{args[0]}"
+                    if args[1]:
+                        src += f" {args[1]}"
+            elif node.value.func.attr == "system":
+                src = f"!{args[0]}"
+            elif node.value.func.attr == "getoutput":
+                src = f"!!{args[0]}"
+            else:
+                raise NothingChanged  # unsupported magic.
+            self.magics[node.value.lineno].append(
+                OffsetAndMagic(node.value.col_offset, src)
+            )
+        self.generic_visit(node)
diff --git a/py311/lib/python3.11/site-packages/black/linegen.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/linegen.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..af3dc2d952f693313600075cb6e6366cd3b81d94
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/linegen.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/linegen.py b/py311/lib/python3.11/site-packages/black/linegen.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d6fa30c49dc88675ca58d42cbf2c96acffab104
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/linegen.py
@@ -0,0 +1,2035 @@
+"""
+Generating lines of code.
+"""
+
+import re
+import sys
+from collections.abc import Collection, Iterator
+from dataclasses import replace
+from enum import Enum, auto
+from functools import partial, wraps
+from typing import Union, cast
+
+from black.brackets import (
+ COMMA_PRIORITY,
+ DOT_PRIORITY,
+ STRING_PRIORITY,
+ get_leaves_inside_matching_brackets,
+ max_delimiter_priority_in_atom,
+)
+from black.comments import (
+ FMT_OFF,
+ FMT_ON,
+ contains_fmt_directive,
+ generate_comments,
+ list_comments,
+)
+from black.lines import (
+ Line,
+ RHSResult,
+ append_leaves,
+ can_be_split,
+ can_omit_invisible_parens,
+ is_line_short_enough,
+ line_to_string,
+)
+from black.mode import Feature, Mode, Preview
+from black.nodes import (
+ ASSIGNMENTS,
+ BRACKETS,
+ CLOSING_BRACKETS,
+ OPENING_BRACKETS,
+ STANDALONE_COMMENT,
+ STATEMENT,
+ WHITESPACE,
+ Visitor,
+ ensure_visible,
+ fstring_tstring_to_string,
+ get_annotation_type,
+ has_sibling_with_type,
+ is_arith_like,
+ is_async_stmt_or_funcdef,
+ is_atom_with_invisible_parens,
+ is_docstring,
+ is_empty_tuple,
+ is_generator,
+ is_lpar_token,
+ is_multiline_string,
+ is_name_token,
+ is_one_sequence_between,
+ is_one_tuple,
+ is_parent_function_or_class,
+ is_part_of_annotation,
+ is_rpar_token,
+ is_stub_body,
+ is_stub_suite,
+ is_tuple,
+ is_tuple_containing_star,
+ is_tuple_containing_walrus,
+ is_type_ignore_comment_string,
+ is_vararg,
+ is_walrus_assignment,
+ is_yield,
+ syms,
+ wrap_in_parentheses,
+)
+from black.numerics import normalize_numeric_literal
+from black.strings import (
+ fix_multiline_docstring,
+ get_string_prefix,
+ normalize_string_prefix,
+ normalize_string_quotes,
+ normalize_unicode_escape_sequences,
+)
+from black.trans import (
+ CannotTransform,
+ StringMerger,
+ StringParenStripper,
+ StringParenWrapper,
+ StringSplitter,
+ Transformer,
+ hug_power_op,
+)
+from blib2to3.pgen2 import token
+from blib2to3.pytree import Leaf, Node
+
+# types
+LeafID = int
+LN = Union[Leaf, Node]
+
+
class CannotSplit(CannotTransform):
    """A readable split that fits the allotted line length is impossible.

    Subclasses CannotTransform so that code driving transformers (see
    `transform_line`) can catch both with a single `except CannotTransform`.
    """
+
+
+# This isn't a dataclass because @dataclass + Generic breaks mypyc.
+# See also https://github.com/mypyc/mypyc/issues/827.
+class LineGenerator(Visitor[Line]):
+ """Generates reformatted Line objects. Empty lines are not emitted.
+
+ Note: destroys the tree it's visiting by mutating prefixes of its leaves
+ in ways that will no longer stringify to valid Python code on the tree.
+ """
+
    def __init__(self, mode: Mode, features: Collection[Feature]) -> None:
        """Store formatting configuration and set up the first (empty) line.

        `mode` carries line length and style options; `features` are the
        syntactical features allowed in the output. The rest of the setup
        lives in `__post_init__` (kept separate — see its docstring).
        """
        self.mode = mode
        self.features = features
        self.current_line: Line
        self.__post_init__()
+
    def line(self, indent: int = 0) -> Iterator[Line]:
        """Generate a line.

        If the line is empty, only emit if it makes sense.
        If the line is too long, split it first and then generate.

        If any lines were generated, set up a new current_line.
        """
        if not self.current_line:
            # Nothing accumulated: just fold the indent change into the
            # (still current) empty line instead of emitting it.
            self.current_line.depth += indent
            return  # Line is empty, don't emit. Creating a new one unnecessary.

        if len(self.current_line.leaves) == 1 and is_async_stmt_or_funcdef(
            self.current_line.leaves[0]
        ):
            # Special case for async def/for/with statements. `visit_async_stmt`
            # adds an `ASYNC` leaf then visits the child def/for/with statement
            # nodes. Line yields from those nodes shouldn't treat the former
            # `ASYNC` leaf as a complete line.
            return

        complete_line = self.current_line
        self.current_line = Line(mode=self.mode, depth=complete_line.depth + indent)
        yield complete_line
+
    def visit_default(self, node: LN) -> Iterator[Line]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Leaf):
            any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
            for comment in generate_comments(node, mode=self.mode):
                if any_open_brackets:
                    # any comment within brackets is subject to splitting
                    self.current_line.append(comment)
                elif comment.type == token.COMMENT:
                    # regular trailing comment
                    self.current_line.append(comment)
                    yield from self.line()

                else:
                    # regular standalone comment
                    yield from self.line()

                    self.current_line.append(comment)
                    yield from self.line()

            if any_open_brackets:
                # Inside brackets the line will be re-joined/split later, so
                # the leaf's original whitespace prefix is irrelevant.
                node.prefix = ""
            if node.type not in WHITESPACE:
                self.current_line.append(node)
        yield from super().visit_default(node)
+
+ def visit_test(self, node: Node) -> Iterator[Line]:
+ """Visit an `x if y else z` test"""
+
+ already_parenthesized = (
+ node.prev_sibling and node.prev_sibling.type == token.LPAR
+ )
+
+ if not already_parenthesized:
+ # Similar to logic in wrap_in_parentheses
+ lpar = Leaf(token.LPAR, "")
+ rpar = Leaf(token.RPAR, "")
+ prefix = node.prefix
+ node.prefix = ""
+ lpar.prefix = prefix
+ node.insert_child(0, lpar)
+ node.append_child(rpar)
+
+ yield from self.visit_default(node)
+
    def visit_INDENT(self, node: Leaf) -> Iterator[Line]:
        """Increase indentation level, maybe yield a line."""
        # In blib2to3 INDENT never holds comments.
        yield from self.line(+1)
        yield from self.visit_default(node)
+
    def visit_DEDENT(self, node: Leaf) -> Iterator[Line]:
        """Decrease indentation level, maybe yield a line."""
        # The current line might still wait for trailing comments. At DEDENT time
        # there won't be any (they would be prefixes on the preceding NEWLINE).
        # Emit the line then.
        yield from self.line()

        # While DEDENT has no value, its prefix may contain standalone comments
        # that belong to the current indentation level. Get 'em.
        yield from self.visit_default(node)

        # Finally, emit the dedent.
        yield from self.line(-1)
+
    def visit_stmt(
        self, node: Node, keywords: set[str], parens: set[str]
    ) -> Iterator[Line]:
        """Visit a statement.

        This implementation is shared for `if`, `while`, `for`, `try`, `except`,
        `def`, `with`, `class`, `assert`, and assignments.

        The relevant Python language `keywords` for a given statement will be
        NAME leaves within it. This methods puts those on a separate line.

        `parens` holds a set of string leaf values immediately after which
        invisible parens should be put.
        """
        normalize_invisible_parens(
            node, parens_after=parens, mode=self.mode, features=self.features
        )
        for child in node.children:
            if is_name_token(child) and child.value in keywords:
                # Start a fresh output line at each statement keyword.
                yield from self.line()

            yield from self.visit(child)
+
    def visit_typeparams(self, node: Node) -> Iterator[Line]:
        """Visit a PEP 695 type-parameter list; drop whitespace before it."""
        yield from self.visit_default(node)
        # Clear the first child's prefix so the list hugs the preceding name.
        node.children[0].prefix = ""
+
    def visit_typevartuple(self, node: Node) -> Iterator[Line]:
        """Visit a `*Ts` type parameter; drop whitespace after the star."""
        yield from self.visit_default(node)
        # children[1] is presumably the name following `*` — clearing its
        # prefix glues it to the star.
        node.children[1].prefix = ""
+
    def visit_paramspec(self, node: Node) -> Iterator[Line]:
        """Visit a `**P` type parameter; drop whitespace after the double star."""
        yield from self.visit_default(node)
        # children[1] is presumably the name following `**` — clearing its
        # prefix glues it to the double star.
        node.children[1].prefix = ""
+
    def visit_dictsetmaker(self, node: Node) -> Iterator[Line]:
        """Visit a dict/set literal body; optionally parenthesize long values."""
        if Preview.wrap_long_dict_values_in_parens in self.mode:
            for i, child in enumerate(node.children):
                if i == 0:
                    continue
                # Only children directly following a `:` are dict *values*.
                if node.children[i - 1].type == token.COLON:
                    if (
                        child.type == syms.atom
                        and child.children[0].type in OPENING_BRACKETS
                        and not is_walrus_assignment(child)
                    ):
                        # Already bracketed: make any redundant parens invisible
                        # instead of stacking another pair.
                        maybe_make_parens_invisible_in_atom(
                            child,
                            parent=node,
                            mode=self.mode,
                            features=self.features,
                            remove_brackets_around_comma=False,
                        )
                    else:
                        wrap_in_parentheses(node, child, visible=False)
        yield from self.visit_default(node)
+
    def visit_funcdef(self, node: Node) -> Iterator[Line]:
        """Visit function definition."""
        yield from self.line()

        # Remove redundant brackets around return type annotation.
        is_return_annotation = False
        for child in node.children:
            if child.type == token.RARROW:
                # The child after `->` is the return annotation.
                is_return_annotation = True
            elif is_return_annotation:
                if child.type == syms.atom and child.children[0].type == token.LPAR:
                    if maybe_make_parens_invisible_in_atom(
                        child,
                        parent=node,
                        mode=self.mode,
                        features=self.features,
                        remove_brackets_around_comma=False,
                    ):
                        wrap_in_parentheses(node, child, visible=False)
                else:
                    wrap_in_parentheses(node, child, visible=False)
                is_return_annotation = False

        for child in node.children:
            yield from self.visit(child)
+
    def visit_match_case(self, node: Node) -> Iterator[Line]:
        """Visit either a match or case statement."""
        # No keyword triggers invisible parens here, hence the empty set.
        normalize_invisible_parens(
            node, parens_after=set(), mode=self.mode, features=self.features
        )

        yield from self.line()
        for child in node.children:
            yield from self.visit(child)
+
+ def visit_suite(self, node: Node) -> Iterator[Line]:
+ """Visit a suite."""
+ if is_stub_suite(node):
+ yield from self.visit(node.children[2])
+ else:
+ yield from self.visit_default(node)
+
    def visit_simple_stmt(self, node: Node) -> Iterator[Line]:
        """Visit a statement without nested statements."""
        prev_type: int | None = None
        for child in node.children:
            # A child at the start of the statement (or right after `;`) that
            # is arithmetic-like gets invisible parens so it can be split.
            if (prev_type is None or prev_type == token.SEMI) and is_arith_like(child):
                wrap_in_parentheses(node, child, visible=False)
            prev_type = child.type

        if node.parent and node.parent.type in STATEMENT:
            if is_parent_function_or_class(node) and is_stub_body(node):
                # Stub body (`...` on the `def` line) stays on the same line.
                yield from self.visit_default(node)
            else:
                yield from self.line(+1)
                yield from self.visit_default(node)
                yield from self.line(-1)

        else:
            if node.parent and is_stub_suite(node.parent):
                node.prefix = ""
                yield from self.visit_default(node)
                return
            yield from self.line()
            yield from self.visit_default(node)
+
    def visit_async_stmt(self, node: Node) -> Iterator[Line]:
        """Visit `async def`, `async for`, `async with`."""
        yield from self.line()

        children = iter(node.children)
        for child in children:
            yield from self.visit(child)

            if child.type == token.ASYNC or child.type == STANDALONE_COMMENT:
                # STANDALONE_COMMENT happens when `# fmt: skip` is applied on the async
                # line.
                break

        # Visit the wrapped def/for/with statement that follows the ASYNC leaf.
        internal_stmt = next(children)
        yield from self.visit(internal_stmt)
+
+ def visit_decorators(self, node: Node) -> Iterator[Line]:
+ """Visit decorators."""
+ for child in node.children:
+ yield from self.line()
+ yield from self.visit(child)
+
    def visit_power(self, node: Node) -> Iterator[Line]:
        """Visit a `power` node: attribute access, calls, and `**` chains."""
        for idx, leaf in enumerate(node.children[:-1]):
            next_leaf = node.children[idx + 1]

            if not isinstance(leaf, Leaf):
                continue

            value = leaf.value.lower()
            if (
                leaf.type == token.NUMBER
                and next_leaf.type == syms.trailer
                # Ensure that we are in an attribute trailer
                and next_leaf.children[0].type == token.DOT
                # It shouldn't wrap hexadecimal, binary and octal literals
                and not value.startswith(("0x", "0b", "0o"))
                # It shouldn't wrap complex literals
                and "j" not in value
            ):
                # e.g. `1 .real` — parenthesize the number so the attribute
                # access remains unambiguous after reformatting.
                wrap_in_parentheses(node, leaf)

        remove_await_parens(node, mode=self.mode, features=self.features)

        yield from self.visit_default(node)
+
    def visit_SEMI(self, leaf: Leaf) -> Iterator[Line]:
        """Remove a semicolon and put the other statement on a separate line."""
        # The `;` leaf itself is never appended to any line.
        yield from self.line()
+
    def visit_ENDMARKER(self, leaf: Leaf) -> Iterator[Line]:
        """End of file. Process outstanding comments and end with a newline."""
        yield from self.visit_default(leaf)
        yield from self.line()
+
    def visit_STANDALONE_COMMENT(self, leaf: Leaf) -> Iterator[Line]:
        """Visit a standalone comment, special-casing `# fmt: off` blocks."""
        any_open_brackets = self.current_line.bracket_tracker.any_open_brackets()
        if not any_open_brackets:
            yield from self.line()
        # STANDALONE_COMMENT nodes created by our special handling in
        # normalize_fmt_off for comment-only blocks have fmt:off as the first
        # line and fmt:on as the last line (each directive on its own line,
        # not embedded in other text). These should be appended directly
        # without calling visit_default, which would process their prefix and
        # lose indentation. Normal STANDALONE_COMMENT nodes go through
        # visit_default.
        value = leaf.value
        lines = value.splitlines()
        is_fmt_off_block = (
            len(lines) >= 2
            and contains_fmt_directive(lines[0], FMT_OFF)
            and contains_fmt_directive(lines[-1], FMT_ON)
        )
        if is_fmt_off_block:
            # This is a fmt:off/on block from normalize_fmt_off - we still need
            # to process any prefix comments (like markdown comments) but append
            # the fmt block itself directly to preserve its formatting

            # Only process prefix comments if there actually is a prefix with comments
            if leaf.prefix and any(
                line.strip().startswith("#")
                and not contains_fmt_directive(line.strip())
                for line in leaf.prefix.split("\n")
            ):
                for comment in generate_comments(leaf, mode=self.mode):
                    yield from self.line()
                    self.current_line.append(comment)
                    yield from self.line()
                # Clear the prefix since we've processed it as comments above
                leaf.prefix = ""

            self.current_line.append(leaf)
            if not any_open_brackets:
                yield from self.line()
        else:
            # Normal standalone comment - process through visit_default
            yield from self.visit_default(leaf)
+
    def visit_factor(self, node: Node) -> Iterator[Line]:
        """Force parentheses between a unary op and a binary power:

        -2 ** 8 -> -(2 ** 8)
        """
        _operator, operand = node.children
        if (
            operand.type == syms.power
            and len(operand.children) == 3
            and operand.children[1].type == token.DOUBLESTAR
        ):
            # These parens are *visible* (real "(" / ")") on purpose.
            lpar = Leaf(token.LPAR, "(")
            rpar = Leaf(token.RPAR, ")")
            # `remove()` returns the node's former index (or None for 0).
            index = operand.remove() or 0
            node.insert_child(index, Node(syms.atom, [lpar, operand, rpar]))
        yield from self.visit_default(node)
+
    def visit_tname(self, node: Node) -> Iterator[Line]:
        """
        Add potential parentheses around types in function parameter lists to be made
        into real parentheses in case the type hint is too long to fit on a line
        Examples:
        def foo(a: int, b: float = 7): ...

        ->

        def foo(a: (int), b: (float) = 7): ...
        """
        # children == [name, ":", annotation]; the annotation is children[2].
        if len(node.children) == 3 and maybe_make_parens_invisible_in_atom(
            node.children[2], parent=node, mode=self.mode, features=self.features
        ):
            wrap_in_parentheses(node, node.children[2], visible=False)

        yield from self.visit_default(node)
+
    def visit_STRING(self, leaf: Leaf) -> Iterator[Line]:
        """Visit a string leaf: reindent docstrings, normalize prefix/quotes."""
        normalize_unicode_escape_sequences(leaf)

        if is_docstring(leaf) and not re.search(r"\\\s*\n", leaf.value):
            # We're ignoring docstrings with backslash newline escapes because changing
            # indentation of those changes the AST representation of the code.
            if self.mode.string_normalization:
                docstring = normalize_string_prefix(leaf.value)
                # We handle string normalization at the end of this method, but since
                # what we do right now acts differently depending on quote style (ex.
                # see padding logic below), there's a possibility for unstable
                # formatting. To avoid a situation where this function formats a
                # docstring differently on the second pass, normalize it early.
                docstring = normalize_string_quotes(docstring)
            else:
                docstring = leaf.value
            prefix = get_string_prefix(docstring)
            docstring = docstring[len(prefix) :]  # Remove the prefix
            quote_char = docstring[0]
            # A natural way to remove the outer quotes is to do:
            #   docstring = docstring.strip(quote_char)
            # but that breaks on """""x""" (which is '""x').
            # So we actually need to remove the first character and the next two
            # characters but only if they are the same as the first.
            quote_len = 1 if docstring[1] != quote_char else 3
            docstring = docstring[quote_len:-quote_len]
            docstring_started_empty = not docstring
            indent = " " * 4 * self.current_line.depth

            if is_multiline_string(leaf):
                docstring = fix_multiline_docstring(docstring, indent)
            else:
                docstring = docstring.strip()

            has_trailing_backslash = False
            if docstring:
                # Add some padding if the docstring starts / ends with a quote mark.
                if docstring[0] == quote_char:
                    docstring = " " + docstring
                if docstring[-1] == quote_char:
                    docstring += " "
                if docstring[-1] == "\\":
                    backslash_count = len(docstring) - len(docstring.rstrip("\\"))
                    if backslash_count % 2:
                        # Odd number of tailing backslashes, add some padding to
                        # avoid escaping the closing string quote.
                        docstring += " "
                        has_trailing_backslash = True
            elif not docstring_started_empty:
                # Whitespace-only docstring collapses to a single space.
                docstring = " "

            # We could enforce triple quotes at this point.
            quote = quote_char * quote_len

            # It's invalid to put closing single-character quotes on a new line.
            if quote_len == 3:
                # We need to find the length of the last line of the docstring
                # to find if we can add the closing quotes to the line without
                # exceeding the maximum line length.
                # If docstring is one line, we don't put the closing quotes on a
                # separate line because it looks ugly (#3320).
                lines = docstring.splitlines()
                last_line_length = len(lines[-1]) if docstring else 0

                # If adding closing quotes would cause the last line to exceed
                # the maximum line length, and the closing quote is not
                # prefixed by a newline then put a line break before
                # the closing quotes
                if (
                    len(lines) > 1
                    and last_line_length + quote_len > self.mode.line_length
                    and len(indent) + quote_len <= self.mode.line_length
                    and not has_trailing_backslash
                ):
                    if leaf.value[-1 - quote_len] == "\n":
                        leaf.value = prefix + quote + docstring + quote
                    else:
                        leaf.value = prefix + quote + docstring + "\n" + indent + quote
                else:
                    leaf.value = prefix + quote + docstring + quote
            else:
                leaf.value = prefix + quote + docstring + quote

        # Non-docstring strings (and docstrings, idempotently) get their
        # prefix and quotes normalized here when enabled.
        if self.mode.string_normalization and leaf.type == token.STRING:
            leaf.value = normalize_string_prefix(leaf.value)
            leaf.value = normalize_string_quotes(leaf.value)
        yield from self.visit_default(leaf)
+
    def visit_NUMBER(self, leaf: Leaf) -> Iterator[Line]:
        """Normalize the numeric literal in place, then handle it as usual."""
        normalize_numeric_literal(leaf)
        yield from self.visit_default(leaf)
+
    def visit_atom(self, node: Node) -> Iterator[Line]:
        """Visit any atom"""
        if len(node.children) == 3:
            first = node.children[0]
            last = node.children[-1]
            if (first.type == token.LSQB and last.type == token.RSQB) or (
                first.type == token.LBRACE and last.type == token.RBRACE
            ):
                # Lists or sets of one item: strip redundant parens around the
                # single element.
                maybe_make_parens_invisible_in_atom(
                    node.children[1],
                    parent=node,
                    mode=self.mode,
                    features=self.features,
                )

        yield from self.visit_default(node)
+
    def visit_fstring(self, node: Node) -> Iterator[Line]:
        # currently we don't want to format and split f-strings at all.
        string_leaf = fstring_tstring_to_string(node)
        node.replace(string_leaf)
        if "\\" in string_leaf.value and any(
            "\\" in str(child)
            for child in node.children
            if child.type == syms.fstring_replacement_field
        ):
            # string normalization doesn't account for nested quotes,
            # causing breakages. skip normalization when nested quotes exist
            yield from self.visit_default(string_leaf)
            return
        yield from self.visit_STRING(string_leaf)
+
    def visit_tstring(self, node: Node) -> Iterator[Line]:
        # currently we don't want to format and split t-strings at all.
        string_leaf = fstring_tstring_to_string(node)
        node.replace(string_leaf)
        if "\\" in string_leaf.value and any(
            "\\" in str(child)
            for child in node.children
            if child.type == syms.fstring_replacement_field
        ):
            # string normalization doesn't account for nested quotes,
            # causing breakages. skip normalization when nested quotes exist
            yield from self.visit_default(string_leaf)
            return
        yield from self.visit_STRING(string_leaf)

        # TODO: Uncomment Implementation to format f-string children
        # fstring_start = node.children[0]
        # fstring_end = node.children[-1]
        # assert isinstance(fstring_start, Leaf)
        # assert isinstance(fstring_end, Leaf)

        # quote_char = fstring_end.value[0]
        # quote_idx = fstring_start.value.index(quote_char)
        # prefix, quote = (
        #     fstring_start.value[:quote_idx],
        #     fstring_start.value[quote_idx:]
        # )

        # if not is_docstring(node, self.mode):
        #     prefix = normalize_string_prefix(prefix)

        # assert quote == fstring_end.value

        # is_raw_fstring = "r" in prefix or "R" in prefix
        # middles = [
        #     leaf
        #     for leaf in node.leaves()
        #     if leaf.type == token.FSTRING_MIDDLE
        # ]

        # if self.mode.string_normalization:
        #     middles, quote = normalize_fstring_quotes(quote, middles, is_raw_fstring)

        # fstring_start.value = prefix + quote
        # fstring_end.value = quote

        # yield from self.visit_default(node)
+
    def visit_comp_for(self, node: Node) -> Iterator[Line]:
        """Visit a comprehension's `for ... in ...` clause."""
        if Preview.wrap_comprehension_in in self.mode:
            # Allow splitting after `in` by adding invisible parens there.
            normalize_invisible_parens(
                node, parens_after={"in"}, mode=self.mode, features=self.features
            )
        yield from self.visit_default(node)
+
    def visit_old_comp_for(self, node: Node) -> Iterator[Line]:
        """Legacy grammar node for comprehension `for`; same handling."""
        yield from self.visit_comp_for(node)
+
    def __post_init__(self) -> None:
        """You are in a twisty little maze of passages."""
        self.current_line = Line(mode=self.mode)

        # Most statement kinds share `visit_stmt`, differing only in which
        # keywords start new lines and which keywords are followed by
        # invisible parentheses.
        v = self.visit_stmt
        Ø: set[str] = set()  # shorthand for "no keywords / no parens"
        self.visit_assert_stmt = partial(v, keywords={"assert"}, parens={"assert", ","})
        self.visit_if_stmt = partial(
            v, keywords={"if", "else", "elif"}, parens={"if", "elif"}
        )
        self.visit_while_stmt = partial(v, keywords={"while", "else"}, parens={"while"})
        self.visit_for_stmt = partial(v, keywords={"for", "else"}, parens={"for", "in"})
        self.visit_try_stmt = partial(
            v, keywords={"try", "except", "else", "finally"}, parens=Ø
        )
        self.visit_except_clause = partial(v, keywords={"except"}, parens={"except"})
        self.visit_with_stmt = partial(v, keywords={"with"}, parens={"with"})
        self.visit_classdef = partial(v, keywords={"class"}, parens=Ø)

        self.visit_expr_stmt = partial(v, keywords=Ø, parens=ASSIGNMENTS)
        self.visit_return_stmt = partial(v, keywords={"return"}, parens={"return"})
        self.visit_import_from = partial(v, keywords=Ø, parens={"import"})
        self.visit_del_stmt = partial(v, keywords=Ø, parens={"del"})
        self.visit_async_funcdef = self.visit_async_stmt
        self.visit_decorated = self.visit_decorators

        # PEP 634
        self.visit_match_stmt = self.visit_match_case
        self.visit_case_block = self.visit_match_case
        self.visit_guard = partial(v, keywords=Ø, parens={"if"})
+
+
def _hugging_power_ops_line_to_string(
    line: Line,
    features: Collection[Feature],
    mode: Mode,
) -> str | None:
    """Render `line` with power operators hugged, or None if not applicable."""
    try:
        hugged = next(hug_power_op(line, features, mode))
    except CannotTransform:
        # hug_power_op refuses lines without huggable power operators.
        return None
    return line_to_string(hugged)
+
+
def transform_line(
    line: Line, mode: Mode, features: Collection[Feature] = ()
) -> Iterator[Line]:
    """Transform a `line`, potentially splitting it into many lines.

    They should fit in the allotted `line_length` but might not be able to.

    `features` are syntactical features that may be used in the output.
    """
    if line.is_comment:
        yield line
        return

    line_str = line_to_string(line)

    # We need the line string when power operators are hugging to determine if we should
    # split the line. Default to line_str, if no power operator are present on the line.
    line_str_hugging_power_ops = (
        _hugging_power_ops_line_to_string(line, features, mode) or line_str
    )

    ll = mode.line_length
    sn = mode.string_normalization
    string_merge = StringMerger(ll, sn)
    string_paren_strip = StringParenStripper(ll, sn)
    string_split = StringSplitter(ll, sn)
    string_paren_wrap = StringParenWrapper(ll, sn)

    transformers: list[Transformer]
    if (
        not line.contains_uncollapsable_type_comments()
        and not line.should_split_rhs
        and not line.magic_trailing_comma
        and (
            is_line_short_enough(line, mode=mode, line_str=line_str_hugging_power_ops)
            or line.contains_unsplittable_type_ignore()
        )
        and not (line.inside_brackets and line.contains_standalone_comments())
        and not line.contains_implicit_multiline_string_with_comments()
    ):
        # Only apply basic string preprocessing, since lines shouldn't be split here.
        if Preview.string_processing in mode:
            transformers = [string_merge, string_paren_strip]
        else:
            transformers = []
    elif line.is_def and not should_split_funcdef_with_rhs(line, mode):
        transformers = [left_hand_split]
    else:

        def _rhs(
            self: object, line: Line, features: Collection[Feature], mode: Mode
        ) -> Iterator[Line]:
            """Wraps calls to `right_hand_split`.

            The calls increasingly `omit` right-hand trailers (bracket pairs with
            content), meaning the trailers get glued together to split on another
            bracket pair instead.
            """
            for omit in generate_trailers_to_omit(line, mode.line_length):
                lines = list(right_hand_split(line, mode, features, omit=omit))
                # Note: this check is only able to figure out if the first line of the
                # *current* transformation fits in the line length. This is true only
                # for simple cases. All others require running more transforms via
                # `transform_line()`. This check doesn't know if those would succeed.
                if is_line_short_enough(lines[0], mode=mode):
                    yield from lines
                    return

            # All splits failed, best effort split with no omits.
            # This mostly happens to multiline strings that are by definition
            # reported as not fitting a single line, as well as lines that contain
            # trailing commas (those have to be exploded).
            yield from right_hand_split(line, mode, features=features)

        # HACK: nested functions (like _rhs) compiled by mypyc don't retain their
        # __name__ attribute which is needed in `run_transformer` further down.
        # Unfortunately a nested class breaks mypyc too. So a class must be created
        # via type ... https://github.com/mypyc/mypyc/issues/884
        rhs = type("rhs", (), {"__call__": _rhs})()

        if Preview.string_processing in mode:
            if line.inside_brackets:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    delimiter_split,
                    standalone_comment_split,
                    string_paren_wrap,
                    rhs,
                ]
            else:
                transformers = [
                    string_merge,
                    string_paren_strip,
                    string_split,
                    string_paren_wrap,
                    rhs,
                ]
        else:
            if line.inside_brackets:
                transformers = [delimiter_split, standalone_comment_split, rhs]
            else:
                transformers = [rhs]
    # It's always safe to attempt hugging of power operations and pretty much every line
    # could match.
    transformers.append(hug_power_op)

    # Try each transformer in order; the first one that succeeds wins.
    for transform in transformers:
        # We are accumulating lines in `result` because we might want to abort
        # mission and return the original line in the end, or attempt a different
        # split altogether.
        try:
            result = run_transformer(line, transform, mode, features, line_str=line_str)
        except CannotTransform:
            continue
        else:
            yield from result
            break

    else:
        # No transformer succeeded: emit the line unchanged.
        yield line
+
+
def should_split_funcdef_with_rhs(line: Line, mode: Mode) -> bool:
    """If a funcdef has a magic trailing comma in the return type, then we should first
    split the line with rhs to respect the comma.
    """
    # Collect the leaves strictly between `->` and the trailing `:`.
    return_type_leaves: list[Leaf] = []
    in_return_type = False

    for leaf in line.leaves:
        if leaf.type == token.COLON:
            in_return_type = False
        if in_return_type:
            return_type_leaves.append(leaf)
        if leaf.type == token.RARROW:
            in_return_type = True

    # using `bracket_split_build_line` will mess with whitespace, so we duplicate a
    # couple lines from it.
    result = Line(mode=line.mode, depth=line.depth)
    leaves_to_track = get_leaves_inside_matching_brackets(return_type_leaves)
    for leaf in return_type_leaves:
        result.append(
            leaf,
            preformatted=True,
            track_bracket=id(leaf) in leaves_to_track,
        )

    # we could also return true if the line is too long, and the return type is longer
    # than the param list. Or if `should_split_rhs` returns True.
    return result.magic_trailing_comma is not None
+
+
class _BracketSplitComponent(Enum):
    """Which part of a bracket split a built line represents."""

    head = auto()  # everything before (and including) the opening bracket
    body = auto()  # the bracketed contents
    tail = auto()  # the closing bracket and anything after it
+
+
def left_hand_split(
    line: Line, _features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split line into many lines, starting with the first matching bracket pair.

    Note: this usually looks weird, only use this for function definitions.
    Prefer RHS otherwise. This is why this function is not symmetrical with
    :func:`right_hand_split` which also handles optional parentheses.
    """
    # Try `(` first (parameter list), then `[` (e.g. subscripted targets).
    for leaf_type in [token.LPAR, token.LSQB]:
        tail_leaves: list[Leaf] = []
        body_leaves: list[Leaf] = []
        head_leaves: list[Leaf] = []
        current_leaves = head_leaves
        matching_bracket: Leaf | None = None
        depth = 0
        for index, leaf in enumerate(line.leaves):
            if index == 2 and leaf.type == token.LSQB:
                # A [ at index 2 means this is a type param, so start
                # tracking the depth
                depth += 1
            elif depth > 0:
                if leaf.type == token.LSQB:
                    depth += 1
                elif leaf.type == token.RSQB:
                    depth -= 1
            if (
                current_leaves is body_leaves
                and leaf.type in CLOSING_BRACKETS
                and leaf.opening_bracket is matching_bracket
                and isinstance(matching_bracket, Leaf)
                # If the code is still on LPAR and we are inside a type
                # param, ignore the match since this is searching
                # for the function arguments
                and not (leaf_type == token.LPAR and depth > 0)
            ):
                ensure_visible(leaf)
                ensure_visible(matching_bracket)
                current_leaves = tail_leaves if body_leaves else head_leaves
            current_leaves.append(leaf)
            if current_leaves is head_leaves:
                if leaf.type == leaf_type and (
                    Preview.fix_type_expansion_split not in mode
                    or not (leaf_type == token.LPAR and depth > 0)
                ):
                    matching_bracket = leaf
                    current_leaves = body_leaves
        if matching_bracket and tail_leaves:
            break
    if not matching_bracket or not tail_leaves:
        raise CannotSplit("No brackets found")

    head = bracket_split_build_line(
        head_leaves, line, matching_bracket, component=_BracketSplitComponent.head
    )
    body = bracket_split_build_line(
        body_leaves, line, matching_bracket, component=_BracketSplitComponent.body
    )
    tail = bracket_split_build_line(
        tail_leaves, line, matching_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    # Empty component lines (e.g. an empty body) are skipped.
    for result in (head, body, tail):
        if result:
            yield result
+
+
def right_hand_split(
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Split line into many lines, starting with the last matching bracket pair.

    If the split was by optional parentheses, attempt splitting without them, too.
    `omit` is a collection of closing bracket IDs that shouldn't be considered for
    this split.

    Note: running this function modifies `bracket_depth` on the leaves of `line`.
    """
    # Compute the side-effect-free split first, then let the helper decide
    # whether to retry without the optional parentheses.
    rhs_result = _first_right_hand_split(line, omit=omit)
    yield from _maybe_split_omitting_optional_parens(
        rhs_result, line, mode, features=features, omit=omit
    )
+
+
def _first_right_hand_split(
    line: Line,
    omit: Collection[LeafID] = (),
) -> RHSResult:
    """Split the line into head, body, tail starting with the last bracket pair.

    Note: this function should not have side effects. It's relied upon by
    _maybe_split_omitting_optional_parens to get an opinion whether to prefer
    splitting on the right side of an assignment statement.
    """
    tail_leaves: list[Leaf] = []
    body_leaves: list[Leaf] = []
    head_leaves: list[Leaf] = []
    # Scan right-to-left, so the lists are filled in reverse and flipped below.
    current_leaves = tail_leaves
    opening_bracket: Leaf | None = None
    closing_bracket: Leaf | None = None
    for leaf in reversed(line.leaves):
        if current_leaves is body_leaves:
            if leaf is opening_bracket:
                current_leaves = head_leaves if body_leaves else tail_leaves
        current_leaves.append(leaf)
        if current_leaves is tail_leaves:
            if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
                current_leaves = body_leaves
    if not (opening_bracket and closing_bracket and head_leaves):
        # If there is no opening or closing_bracket that means the split failed and
        # all content is in the tail. Otherwise, if `head_leaves` are empty, it means
        # the matching `opening_bracket` wasn't available on `line` anymore.
        raise CannotSplit("No brackets found")

    tail_leaves.reverse()
    body_leaves.reverse()
    head_leaves.reverse()

    body: Line | None = None
    if (
        Preview.hug_parens_with_braces_and_square_brackets in line.mode
        and tail_leaves[0].value
        and tail_leaves[0].opening_bracket is head_leaves[-1]
    ):
        inner_body_leaves = list(body_leaves)
        hugged_opening_leaves: list[Leaf] = []
        hugged_closing_leaves: list[Leaf] = []
        is_unpacking = body_leaves[0].type in [token.STAR, token.DOUBLESTAR]
        unpacking_offset: int = 1 if is_unpacking else 0
        # Peel off nested matching bracket pairs from both ends of the body;
        # those brackets get "hugged" onto the head and tail.
        while (
            len(inner_body_leaves) >= 2 + unpacking_offset
            and inner_body_leaves[-1].type in CLOSING_BRACKETS
            and inner_body_leaves[-1].opening_bracket
            is inner_body_leaves[unpacking_offset]
        ):
            if unpacking_offset:
                hugged_opening_leaves.append(inner_body_leaves.pop(0))
                unpacking_offset = 0
            hugged_opening_leaves.append(inner_body_leaves.pop(0))
            hugged_closing_leaves.insert(0, inner_body_leaves.pop())

        if hugged_opening_leaves and inner_body_leaves:
            inner_body = bracket_split_build_line(
                inner_body_leaves,
                line,
                hugged_opening_leaves[-1],
                component=_BracketSplitComponent.body,
            )
            if (
                line.mode.magic_trailing_comma
                and inner_body_leaves[-1].type == token.COMMA
            ):
                should_hug = True
            else:
                # Budget for the inner body: total length minus the hugged
                # bracket characters that stay on the head/tail lines.
                line_length = line.mode.line_length - sum(
                    len(str(leaf))
                    for leaf in hugged_opening_leaves + hugged_closing_leaves
                )
                if is_line_short_enough(
                    inner_body, mode=replace(line.mode, line_length=line_length)
                ):
                    # Do not hug if it fits on a single line.
                    should_hug = False
                else:
                    should_hug = True
            if should_hug:
                body_leaves = inner_body_leaves
                head_leaves.extend(hugged_opening_leaves)
                tail_leaves = hugged_closing_leaves + tail_leaves
                body = inner_body  # No need to re-calculate the body again later.

    head = bracket_split_build_line(
        head_leaves, line, opening_bracket, component=_BracketSplitComponent.head
    )
    if body is None:
        body = bracket_split_build_line(
            body_leaves, line, opening_bracket, component=_BracketSplitComponent.body
        )
    tail = bracket_split_build_line(
        tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail
    )
    bracket_split_succeeded_or_raise(head, body, tail)
    return RHSResult(head, body, tail, opening_bracket, closing_bracket)
+
+
def _maybe_split_omitting_optional_parens(
    rhs: RHSResult,
    line: Line,
    mode: Mode,
    features: Collection[Feature] = (),
    omit: Collection[LeafID] = (),
) -> Iterator[Line]:
    """Yield the split for `rhs`, preferring a variant without optional parens.

    When the split brackets of `rhs` are invisible (optional) parentheses, a
    second split omitting them is attempted recursively and used when the
    heuristics in `_prefer_split_rhs_oop_over_rhs` favor it; otherwise the
    optional parens are made visible and the current split is emitted.
    """
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES not in features
        # the opening bracket is an optional paren
        and rhs.opening_bracket.type == token.LPAR
        and not rhs.opening_bracket.value
        # the closing bracket is an optional paren
        and rhs.closing_bracket.type == token.RPAR
        and not rhs.closing_bracket.value
        # it's not an import (optional parens are the only thing we can split on
        # in this case; attempting a split without them is a waste of time)
        and not line.is_import
        # and we can actually remove the parens
        and can_omit_invisible_parens(rhs, mode.line_length)
    ):
        omit = {id(rhs.closing_bracket), *omit}
        try:
            # The RHSResult omitting the optional parens.
            rhs_oop = _first_right_hand_split(line, omit=omit)
            if _prefer_split_rhs_oop_over_rhs(rhs_oop, rhs, mode):
                yield from _maybe_split_omitting_optional_parens(
                    rhs_oop, line, mode, features=features, omit=omit
                )
                return

        except CannotSplit as e:
            # For chained assignments we want to use the previous successful split
            if line.is_chained_assignment:
                pass

            elif (
                not can_be_split(rhs.body)
                and not is_line_short_enough(rhs.body, mode=mode)
                and not (
                    Preview.wrap_long_dict_values_in_parens
                    and rhs.opening_bracket.parent
                    and rhs.opening_bracket.parent.parent
                    and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker
                )
            ):
                raise CannotSplit(
                    "Splitting failed, body is still too long and can't be split."
                ) from e

            elif (
                rhs.head.contains_multiline_strings()
                or rhs.tail.contains_multiline_strings()
            ):
                raise CannotSplit(
                    "The current optional pair of parentheses is bound to fail to"
                    " satisfy the splitting algorithm because the head or the tail"
                    " contains multiline strings which by definition never fit one"
                    " line."
                ) from e

    # Fall through: keep the optional parens, make them visible, and emit the
    # non-empty components of the current split.
    ensure_visible(rhs.opening_bracket)
    ensure_visible(rhs.closing_bracket)
    for result in (rhs.head, rhs.body, rhs.tail):
        if result:
            yield result
+
+
def _prefer_split_rhs_oop_over_rhs(
    rhs_oop: RHSResult, rhs: RHSResult, mode: Mode
) -> bool:
    """
    Returns whether we should prefer the result from a split omitting optional parens
    (rhs_oop) over the original (rhs).

    The checks below are order-sensitive heuristics: the early ones decide
    outright, the later ones only apply once the earlier conditions held.
    """
    # Any component with an unsplittable `# type: ignore` forces the
    # paren-omitting variant.
    if (
        rhs_oop.head.contains_unsplittable_type_ignore()
        or rhs_oop.body.contains_unsplittable_type_ignore()
        or rhs_oop.tail.contains_unsplittable_type_ignore()
    ):
        return True

    # Retain optional parens around dictionary values
    if (
        Preview.wrap_long_dict_values_in_parens
        and rhs.opening_bracket.parent
        and rhs.opening_bracket.parent.parent
        and rhs.opening_bracket.parent.parent.type == syms.dictsetmaker
        and rhs.body.bracket_tracker.delimiters
    ):
        # Unless the split is inside the key
        # (detected by a `:` remaining in the paren-omitting tail).
        return any(leaf.type == token.COLON for leaf in rhs_oop.tail.leaves)

    # the split is right after `=`
    if not (len(rhs.head.leaves) >= 2 and rhs.head.leaves[-2].type == token.EQUAL):
        return True

    # the left side of assignment contains brackets
    if not any(leaf.type in BRACKETS for leaf in rhs.head.leaves[:-1]):
        return True

    # the left side of assignment is short enough (the -1 is for the ending optional
    # paren)
    if not is_line_short_enough(
        rhs.head, mode=replace(mode, line_length=mode.line_length - 1)
    ):
        return True

    # the left side of assignment won't explode further because of magic trailing comma
    if rhs.head.magic_trailing_comma is not None:
        return True

    # If we have multiple targets, we prefer more `=`s on the head vs pushing them to
    # the body
    rhs_head_equal_count = [leaf.type for leaf in rhs.head.leaves].count(token.EQUAL)
    rhs_oop_head_equal_count = [leaf.type for leaf in rhs_oop.head.leaves].count(
        token.EQUAL
    )
    if rhs_head_equal_count > 1 and rhs_head_equal_count > rhs_oop_head_equal_count:
        return False

    # Scan backwards from the end of the paren-omitting head up to the `=`,
    # looking for a closing bracket after the assignment operator.
    has_closing_bracket_after_assign = False
    for leaf in reversed(rhs_oop.head.leaves):
        if leaf.type == token.EQUAL:
            break
        if leaf.type in CLOSING_BRACKETS:
            has_closing_bracket_after_assign = True
            break
    return (
        # contains matching brackets after the `=` (done by checking there is a
        # closing bracket)
        has_closing_bracket_after_assign
        or (
            # the split is actually from inside the optional parens (done by checking
            # the first line still contains the `=`)
            any(leaf.type == token.EQUAL for leaf in rhs_oop.head.leaves)
            # the first line is short enough
            and is_line_short_enough(rhs_oop.head, mode=mode)
        )
    )
+
+
def bracket_split_succeeded_or_raise(head: Line, body: Line, tail: Line) -> None:
    """Validate the result of the last left- or right-hand bracket split.

    Such a split is based on a pair of brackets: content up to and including
    the opening bracket becomes `head`, the bracketed content becomes `body`,
    and the closing bracket onward becomes `tail`.

    The split counts as failed — and :exc:`CannotSplit` is raised — when the
    body is empty and either nothing was split off at all (empty tail) or the
    tail is so tiny that a dedicated line isn't worth it. Otherwise this is a
    no-op.
    """
    stripped_tail_len = len(str(tail).strip())
    if body:
        return
    if stripped_tail_len == 0:
        raise CannotSplit("Splitting brackets produced the same line")
    if stripped_tail_len < 3:
        raise CannotSplit(
            f"Splitting brackets on an empty body to save {stripped_tail_len}"
            " characters is not worth it"
        )
+
+
+def _ensure_trailing_comma(
+ leaves: list[Leaf], original: Line, opening_bracket: Leaf
+) -> bool:
+ if not leaves:
+ return False
+ # Ensure a trailing comma for imports
+ if original.is_import:
+ return True
+ # ...and standalone function arguments
+ if not original.is_def:
+ return False
+ if opening_bracket.value != "(":
+ return False
+ # Don't add commas if we already have any commas
+ if any(
+ leaf.type == token.COMMA and not is_part_of_annotation(leaf) for leaf in leaves
+ ):
+ return False
+
+ # Find a leaf with a parent (comments don't have parents)
+ leaf_with_parent = next((leaf for leaf in leaves if leaf.parent), None)
+ if leaf_with_parent is None:
+ return True
+ # Don't add commas inside parenthesized return annotations
+ if get_annotation_type(leaf_with_parent) == "return":
+ return False
+ # Don't add commas inside PEP 604 unions
+ if (
+ leaf_with_parent.parent
+ and leaf_with_parent.parent.next_sibling
+ and leaf_with_parent.parent.next_sibling.type == token.VBAR
+ ):
+ return False
+ return True
+
+
def bracket_split_build_line(
    leaves: list[Leaf],
    original: Line,
    opening_bracket: Leaf,
    *,
    component: _BracketSplitComponent,
) -> Line:
    """Return a new line with given `leaves` and respective comments from `original`.

    If it's the head component, brackets will be tracked so trailing commas are
    respected.

    If it's the body component, the result line is one-indented inside brackets and as
    such has its first leaf's prefix normalized and a trailing comma added when
    expected.

    Note: may mutate `leaves` in place (a trailing comma Leaf can be inserted).
    """
    result = Line(mode=original.mode, depth=original.depth)
    if component is _BracketSplitComponent.body:
        result.inside_brackets = True
        result.depth += 1
        if _ensure_trailing_comma(leaves, original, opening_bracket):
            # Insert the comma after the last leaf that isn't a standalone
            # comment (and only if that leaf isn't already a comma).
            for i in range(len(leaves) - 1, -1, -1):
                if leaves[i].type == STANDALONE_COMMENT:
                    continue

                if leaves[i].type != token.COMMA:
                    new_comma = Leaf(token.COMMA, ",")
                    leaves.insert(i + 1, new_comma)
                break

    leaves_to_track: set[LeafID] = set()
    if component is _BracketSplitComponent.head:
        leaves_to_track = get_leaves_inside_matching_brackets(leaves)
    # Populate the line
    for leaf in leaves:
        result.append(
            leaf,
            preformatted=True,
            track_bracket=id(leaf) in leaves_to_track,
        )
        # Carry over any inline comments attached to this leaf.
        for comment_after in original.comments_after(leaf):
            result.append(comment_after, preformatted=True)
    if component is _BracketSplitComponent.body and should_split_line(
        result, opening_bracket
    ):
        result.should_split_rhs = True
    return result
+
+
def dont_increase_indentation(split_func: Transformer) -> Transformer:
    """Decorator for split functions: blank out the prefix of each emitted line.

    Clearing the first leaf's prefix on every yielded line keeps the split
    results from inheriting extra indentation/whitespace from the original.
    """

    @wraps(split_func)
    def split_wrapper(
        line: Line, features: Collection[Feature], mode: Mode
    ) -> Iterator[Line]:
        for produced in split_func(line, features, mode):
            produced.leaves[0].prefix = ""
            yield produced

    return split_wrapper
+
+
+def _get_last_non_comment_leaf(line: Line) -> int | None:
+ for leaf_idx in range(len(line.leaves) - 1, 0, -1):
+ if line.leaves[leaf_idx].type != STANDALONE_COMMENT:
+ return leaf_idx
+ return None
+
+
def _can_add_trailing_comma(leaf: Leaf, features: Collection[Feature]) -> bool:
    """Whether a trailing comma may follow `leaf` given the target `features`.

    Varargs (`*`/`**`) only tolerate a trailing comma when the corresponding
    grammar feature is available: TRAILING_COMMA_IN_DEF for signatures,
    TRAILING_COMMA_IN_CALL for call sites. Anything else is always fine.
    """
    # `*args` / `**kwargs` inside a function signature.
    if is_vararg(leaf, within={syms.typedargslist}):
        return Feature.TRAILING_COMMA_IN_DEF in features
    # `*args` / `**kwargs` inside a call's argument list.
    if is_vararg(leaf, within={syms.arglist, syms.argument}):
        return Feature.TRAILING_COMMA_IN_CALL in features
    return True
+
+
+def _safe_add_trailing_comma(safe: bool, delimiter_priority: int, line: Line) -> Line:
+ if (
+ safe
+ and delimiter_priority == COMMA_PRIORITY
+ and line.leaves[-1].type != token.COMMA
+ and line.leaves[-1].type != STANDALONE_COMMENT
+ ):
+ new_comma = Leaf(token.COMMA, ",")
+ line.append(new_comma)
+ return line
+
+
+MIGRATE_COMMENT_DELIMITERS = {STRING_PRIORITY, COMMA_PRIORITY}
+
+
@dont_increase_indentation
def delimiter_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split according to delimiters of the highest priority.

    If the appropriate Features are given, the split will add trailing commas
    also in function signatures and calls that contain `*` and `**`.

    Raises CannotSplit when the line is empty, has no delimiters, or the only
    delimiter is a single attribute-access dot.
    """
    if len(line.leaves) == 0:
        raise CannotSplit("Line empty") from None
    last_leaf = line.leaves[-1]

    bt = line.bracket_tracker
    try:
        # A trailing delimiter on the last leaf shouldn't drive the split.
        delimiter_priority = bt.max_delimiter_priority(exclude={id(last_leaf)})
    except ValueError:
        raise CannotSplit("No delimiters found") from None

    if (
        delimiter_priority == DOT_PRIORITY
        and bt.delimiter_count_with_priority(delimiter_priority) == 1
    ):
        raise CannotSplit("Splitting a single attribute from its owner looks wrong")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )
    lowest_depth = sys.maxsize
    trailing_comma_safe = True

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            # The current line refused the leaf; emit it and start a new one.
            yield current_line

            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    def append_comments(leaf: Leaf) -> Iterator[Line]:
        # Carry over inline comments attached to `leaf` in the original line.
        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    last_non_comment_leaf = _get_last_non_comment_leaf(line)
    for leaf_idx, leaf in enumerate(line.leaves):
        yield from append_to_line(leaf)

        # Decide whether this leaf's comments stay with it: they do unless the
        # *previous* leaf was the split delimiter (then they were already
        # handled below), except for delimiters whose comments migrate.
        previous_priority = leaf_idx > 0 and bt.delimiters.get(
            id(line.leaves[leaf_idx - 1])
        )
        if (
            previous_priority != delimiter_priority
            or delimiter_priority in MIGRATE_COMMENT_DELIMITERS
        ):
            yield from append_comments(leaf)

        lowest_depth = min(lowest_depth, leaf.bracket_depth)
        if trailing_comma_safe and leaf.bracket_depth == lowest_depth:
            trailing_comma_safe = _can_add_trailing_comma(leaf, features)

        # When trailing standalone comments exist, add the trailing comma at
        # the last real leaf rather than after the comments.
        if last_leaf.type == STANDALONE_COMMENT and leaf_idx == last_non_comment_leaf:
            current_line = _safe_add_trailing_comma(
                trailing_comma_safe, delimiter_priority, current_line
            )

        leaf_priority = bt.delimiters.get(id(leaf))
        if leaf_priority == delimiter_priority:
            if (
                leaf_idx + 1 < len(line.leaves)
                and delimiter_priority not in MIGRATE_COMMENT_DELIMITERS
            ):
                # Pull the next leaf's comments onto this line before breaking.
                yield from append_comments(line.leaves[leaf_idx + 1])

            yield current_line
            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )

    if current_line:
        current_line = _safe_add_trailing_comma(
            trailing_comma_safe, delimiter_priority, current_line
        )
        yield current_line
+
+
@dont_increase_indentation
def standalone_comment_split(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """Split standalone comments from the rest of the line.

    Each leaf (and its trailing comments) is appended to the current output
    line until appending becomes impossible, at which point the current line
    is emitted and a fresh one is started.

    Raises CannotSplit when `line` has no standalone comments.
    """
    if not line.contains_standalone_comments():
        raise CannotSplit("Line does not have any standalone comments")

    current_line = Line(
        mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
    )

    def append_to_line(leaf: Leaf) -> Iterator[Line]:
        """Append `leaf` to current line or to new line if appending impossible."""
        nonlocal current_line
        try:
            current_line.append_safe(leaf, preformatted=True)
        except ValueError:
            yield current_line

            # `mode=` passed by keyword for consistency with every other
            # Line(...) construction in this module.
            current_line = Line(
                mode=line.mode, depth=line.depth, inside_brackets=line.inside_brackets
            )
            current_line.append(leaf)

    for leaf in line.leaves:
        yield from append_to_line(leaf)

        for comment_after in line.comments_after(leaf):
            yield from append_to_line(comment_after)

    if current_line:
        yield current_line
+
+
def normalize_invisible_parens(
    node: Node, parens_after: set[str], *, mode: Mode, features: Collection[Feature]
) -> None:
    """Make existing optional parentheses invisible or create new ones.

    `parens_after` is a set of string leaf values immediately after which parens
    should be put.

    Standardizes on visible parentheses for single-element tuples, and keeps
    existing visible parentheses for other tuples and generator expressions.
    """
    for pc in list_comments(node.prefix, is_endmarker=False, mode=mode):
        if contains_fmt_directive(pc.value, FMT_OFF):
            # This `node` has a prefix with `# fmt: off`, don't mess with parens.
            return

    # The multiple context managers grammar has a different pattern, thus this is
    # separate from the for-loop below. This possibly wraps them in invisible parens,
    # and later will be removed in remove_with_parens when needed.
    if node.type == syms.with_stmt:
        _maybe_wrap_cms_in_parens(node, mode, features)

    check_lpar = False
    for index, child in enumerate(list(node.children)):
        # Fixes a bug where invisible parens are not properly stripped from
        # assignment statements that contain type annotations.
        if isinstance(child, Node) and child.type == syms.annassign:
            normalize_invisible_parens(
                child, parens_after=parens_after, mode=mode, features=features
            )

        # Fixes a bug where invisible parens are not properly wrapped around
        # case blocks.
        if isinstance(child, Node) and child.type == syms.case_block:
            normalize_invisible_parens(
                child, parens_after={"case"}, mode=mode, features=features
            )

        # Add parentheses around if guards in case blocks
        if isinstance(child, Node) and child.type == syms.guard:
            normalize_invisible_parens(
                child, parens_after={"if"}, mode=mode, features=features
            )

        # Add parentheses around long tuple unpacking in assignments.
        if (
            index == 0
            and isinstance(child, Node)
            and child.type == syms.testlist_star_expr
        ):
            check_lpar = True

        # Check for assignment LHS with preview feature enabled
        if (
            Preview.remove_parens_from_assignment_lhs in mode
            and index == 0
            and isinstance(child, Node)
            and child.type == syms.atom
            and node.type == syms.expr_stmt
            and not _atom_has_magic_trailing_comma(child, mode)
            and not _is_atom_multiline(child)
        ):
            if maybe_make_parens_invisible_in_atom(
                child,
                parent=node,
                mode=mode,
                features=features,
                remove_brackets_around_comma=True,
                allow_star_expr=True,
            ):
                wrap_in_parentheses(node, child, visible=False)

        if check_lpar:
            if (
                child.type == syms.atom
                and node.type == syms.for_stmt
                and isinstance(child.prev_sibling, Leaf)
                and child.prev_sibling.type == token.NAME
                and child.prev_sibling.value == "for"
            ):
                # Parenthesized loop targets: `for (x) in ...:`.
                if maybe_make_parens_invisible_in_atom(
                    child,
                    parent=node,
                    mode=mode,
                    features=features,
                    remove_brackets_around_comma=True,
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif isinstance(child, Node) and node.type == syms.with_stmt:
                remove_with_parens(child, node, mode=mode, features=features)
            elif child.type == syms.atom and not (
                # Keep visible parens in `... in (x)` where the parenthesized
                # expression is a plain test.
                "in" in parens_after
                and len(child.children) == 3
                and is_lpar_token(child.children[0])
                and is_rpar_token(child.children[-1])
                and child.children[1].type == syms.test
            ):
                if maybe_make_parens_invisible_in_atom(
                    child, parent=node, mode=mode, features=features
                ):
                    wrap_in_parentheses(node, child, visible=False)
            elif is_one_tuple(child):
                wrap_in_parentheses(node, child, visible=True)
            elif node.type == syms.import_from:
                _normalize_import_from(node, child, index)
                break
            elif (
                index == 1
                and child.type == token.STAR
                and node.type == syms.except_clause
            ):
                # In except* (PEP 654), the star is actually part of
                # the keyword. So we need to skip the insertion of
                # invisible parentheses to work more precisely.
                continue

            elif (
                isinstance(child, Leaf)
                and child.next_sibling is not None
                and child.next_sibling.type == token.COLON
                and child.value == "case"
            ):
                # A special patch for the "case case:" scenario: the second
                # occurrence of case will not be parsed as a Python keyword.
                break

            elif not is_multiline_string(child):
                wrap_in_parentheses(node, child, visible=False)

        comma_check = child.type == token.COMMA

        # The *next* child gets the paren treatment when this one is a
        # trigger keyword (from `parens_after`) or a comma.
        check_lpar = isinstance(child, Leaf) and (
            child.value in parens_after or comma_check
        )
+
+
def _normalize_import_from(parent: Node, child: LN, index: int) -> None:
    """Normalize parentheses on a `from ... import ...` statement.

    `import from` nodes store their parentheses directly as children of the
    statement, so they are handled here instead of via atoms: an existing
    pair is made invisible, while a bare import (other than `import *`) gets
    a fresh invisible pair inserted.
    """
    if is_lpar_token(child):
        # Existing parens: blank both out so they become invisible.
        closing = parent.children[-1]
        assert is_rpar_token(closing)
        child.value = ""
        closing.value = ""
    elif child.type != token.STAR:
        # No parens yet: wrap the imported names in an invisible pair.
        parent.insert_child(index, Leaf(token.LPAR, ""))
        parent.append_child(Leaf(token.RPAR, ""))
+
+
def remove_await_parens(node: Node, mode: Mode, features: Collection[Feature]) -> None:
    """Hide optional parentheses directly following an `await` keyword.

    Parens are made invisible via `maybe_make_parens_invisible_in_atom`, then
    re-made visible when removing them could change operator precedence.
    """
    if node.children[0].type == token.AWAIT and len(node.children) > 1:
        if (
            node.children[1].type == syms.atom
            and node.children[1].children[0].type == token.LPAR
        ):
            if maybe_make_parens_invisible_in_atom(
                node.children[1],
                parent=node,
                mode=mode,
                features=features,
                remove_brackets_around_comma=True,
            ):
                wrap_in_parentheses(node, node.children[1], visible=False)

            # Since await is an expression we shouldn't remove
            # brackets in cases where this would change
            # the AST due to operator precedence.
            # Therefore we only aim to remove brackets around
            # power nodes that aren't also await expressions themselves.
            # https://peps.python.org/pep-0492/#updated-operator-precedence-table
            # N.B. We've still removed any redundant nested brackets though :)
            opening_bracket = cast(Leaf, node.children[1].children[0])
            closing_bracket = cast(Leaf, node.children[1].children[-1])
            bracket_contents = node.children[1].children[1]
            if isinstance(bracket_contents, Node) and (
                bracket_contents.type != syms.power
                or bracket_contents.children[0].type == token.AWAIT
                or any(
                    isinstance(child, Leaf) and child.type == token.DOUBLESTAR
                    for child in bracket_contents.children
                )
            ):
                ensure_visible(opening_bracket)
                ensure_visible(closing_bracket)
+
+
def _maybe_wrap_cms_in_parens(
    node: Node, mode: Mode, features: Collection[Feature]
) -> None:
    """When enabled and safe, wrap the multiple context managers in invisible parens.

    It is only safe when `features` contain Feature.PARENTHESIZED_CONTEXT_MANAGERS.
    """
    if (
        Feature.PARENTHESIZED_CONTEXT_MANAGERS not in features
        or len(node.children) <= 2
        # If it's an atom, it's already wrapped in parens.
        or node.children[1].type == syms.atom
    ):
        return
    # Find the `:` ending the with-statement header; the context managers
    # are everything between `with` (child 0) and the colon.
    colon_index: int | None = None
    for i in range(2, len(node.children)):
        if node.children[i].type == token.COLON:
            colon_index = i
            break
    if colon_index is not None:
        lpar = Leaf(token.LPAR, "")
        rpar = Leaf(token.RPAR, "")
        context_managers = node.children[1:colon_index]
        # Detach the context managers so they can be re-parented below.
        for child in context_managers:
            child.remove()
        # After wrapping, the with_stmt will look like this:
        # with_stmt
        #   NAME 'with'
        #   atom
        #     LPAR ''
        #     testlist_gexp
        #       ... <-- context_managers
        #     /testlist_gexp
        #     RPAR ''
        #   /atom
        #   COLON ':'
        new_child = Node(
            syms.atom, [lpar, Node(syms.testlist_gexp, context_managers), rpar]
        )
        node.insert_child(1, new_child)
+
+
def remove_with_parens(
    node: Node, parent: Node, mode: Mode, features: Collection[Feature]
) -> None:
    """Recursively hide optional parens in `with` statements."""
    # Removing all unnecessary parentheses in with statements in one pass is a tad
    # complex as different variations of bracketed statements result in pretty
    # different parse trees:
    #
    # with (open("file")) as f:  # this is an asexpr_test
    #     ...
    #
    # with (open("file") as f):  # this is an atom containing an
    #     ...                    # asexpr_test
    #
    # with (open("file")) as f, (open("file")) as f:  # this is asexpr_test, COMMA,
    #     ...                                         # asexpr_test
    #
    # with (open("file") as f, open("file") as f):  # an atom containing a
    #     ...                                       # testlist_gexp which then
    #                                               # contains multiple asexpr_test(s)
    if node.type == syms.atom:
        if maybe_make_parens_invisible_in_atom(
            node,
            parent=parent,
            mode=mode,
            features=features,
            remove_brackets_around_comma=True,
        ):
            wrap_in_parentheses(parent, node, visible=False)
        # Recurse into the parenthesized contents (child 1, between parens).
        if isinstance(node.children[1], Node):
            remove_with_parens(node.children[1], node, mode=mode, features=features)
    elif node.type == syms.testlist_gexp:
        # Multiple comma-separated context managers: handle each one.
        for child in node.children:
            if isinstance(child, Node):
                remove_with_parens(child, node, mode=mode, features=features)
    elif node.type == syms.asexpr_test and not any(
        # Leave walrus assignments alone: their parens are significant.
        leaf.type == token.COLONEQUAL
        for leaf in node.leaves()
    ):
        if maybe_make_parens_invisible_in_atom(
            node.children[0],
            parent=node,
            mode=mode,
            features=features,
            remove_brackets_around_comma=True,
        ):
            wrap_in_parentheses(node, node.children[0], visible=False)
+
+
+def _atom_has_magic_trailing_comma(node: LN, mode: Mode) -> bool:
+ """Check if an atom node has a magic trailing comma.
+
+ Returns True for single-element tuples with trailing commas like (a,),
+ which should be preserved to maintain their tuple type.
+ """
+ if not mode.magic_trailing_comma:
+ return False
+
+ return is_one_tuple(node)
+
+
def _is_atom_multiline(node: LN) -> bool:
    """Return True when the atom `node` spans multiple lines.

    Only the subtree between the brackets (the middle child) is inspected: a
    newline in any leaf prefix there indicates intentional multiline
    formatting. The first child is skipped on purpose — its prefix holds
    whatever blank lines/comments preceded the opening paren.
    """
    if not isinstance(node, Node) or len(node.children) < 3:
        return False

    interior = node.children[1]
    return any(
        isinstance(descendant, Leaf) and "\n" in descendant.prefix
        for descendant in interior.pre_order()
    )
+
+
def maybe_make_parens_invisible_in_atom(
    node: LN,
    parent: LN,
    mode: Mode,
    features: Collection[Feature],
    remove_brackets_around_comma: bool = False,
    allow_star_expr: bool = False,
) -> bool:
    """If it's safe, make the parens in the atom `node` invisible, recursively.
    Additionally, remove repeated, adjacent invisible parens from the atom `node`
    as they are redundant.

    Returns whether the node should itself be wrapped in invisible parentheses.
    """
    # Cases where the parentheses must stay visible.
    if (
        node.type not in (syms.atom, syms.expr)
        or is_empty_tuple(node)
        or is_one_tuple(node)
        or (is_tuple(node) and parent.type == syms.asexpr_test)
        or (
            is_tuple(node)
            and parent.type == syms.with_stmt
            and has_sibling_with_type(node, token.COMMA)
        )
        or (is_yield(node) and parent.type != syms.expr_stmt)
        or (
            # This condition tries to prevent removing non-optional brackets
            # around a tuple; however, it can be a bit overzealous so we provide
            # an option to skip this check for `for` and `with` statements.
            not remove_brackets_around_comma
            and max_delimiter_priority_in_atom(node) >= COMMA_PRIORITY
            # Skip this check in Preview mode in order to
            # remove parentheses around multiple exception types in except and
            # except* without as. See PEP 758 for details.
            and not (
                Preview.remove_parens_around_except_types in mode
                and Feature.UNPARENTHESIZED_EXCEPT_TYPES in features
                # is a tuple
                and is_tuple(node)
                # has a parent node
                and node.parent is not None
                # parent is an except clause
                and node.parent.type == syms.except_clause
                # is not immediately followed by as clause
                and not (
                    node.next_sibling is not None
                    and is_name_token(node.next_sibling)
                    and node.next_sibling.value == "as"
                )
            )
        )
        or is_tuple_containing_walrus(node)
        or (not allow_star_expr and is_tuple_containing_star(node))
        or is_generator(node)
    ):
        return False

    if is_walrus_assignment(node):
        # Parenthesized walrus stays parenthesized in these statement kinds.
        if parent.type in [
            syms.annassign,
            syms.expr_stmt,
            syms.assert_stmt,
            syms.return_stmt,
            syms.except_clause,
            syms.funcdef,
            syms.with_stmt,
            syms.testlist_gexp,
            syms.tname,
            # these ones aren't useful to end users, but they do please fuzzers
            syms.for_stmt,
            syms.del_stmt,
            syms.for_stmt,  # NOTE(review): duplicate entry; harmless for membership
        ]:
            return False

    first = node.children[0]
    last = node.children[-1]
    if is_lpar_token(first) and is_rpar_token(last):
        middle = node.children[1]
        # make parentheses invisible
        if (
            # If the prefix of `middle` includes a type comment with
            # ignore annotation, then we do not remove the parentheses
            not is_type_ignore_comment_string(middle.prefix.strip(), mode=mode)
        ):
            first.value = ""
            last.value = ""
        maybe_make_parens_invisible_in_atom(
            middle,
            parent=parent,
            mode=mode,
            features=features,
            remove_brackets_around_comma=remove_brackets_around_comma,
        )

        if is_atom_with_invisible_parens(middle):
            # Strip the invisible parens from `middle` by replacing
            # it with the child in-between the invisible parens
            middle.replace(middle.children[1])

            if middle.children[0].prefix.strip():
                # Preserve comments before first paren
                middle.children[1].prefix = (
                    middle.children[0].prefix + middle.children[1].prefix
                )

            if middle.children[-1].prefix.strip():
                # Preserve comments before last paren
                last.prefix = middle.children[-1].prefix + last.prefix

        return False

    return True
+
+
def should_split_line(line: Line, opening_bracket: Leaf) -> bool:
    """Should `line` be immediately split with `delimiter_split()` after RHS?"""
    if not opening_bracket.parent or opening_bracket.value not in "[{(":
        return False

    # We're essentially checking whether the body is delimited by commas and
    # there's more than one of them: we exclude a trailing comma, and if the
    # highest delimiter priority is still the comma's, more of them exist.
    excluded_ids: set[int] = set()
    has_trailing_comma = False
    try:
        final_leaf = line.leaves[-1]
        if final_leaf.type == token.COMMA:
            has_trailing_comma = True
            excluded_ids.add(id(final_leaf))
        top_priority = line.bracket_tracker.max_delimiter_priority(
            exclude=excluded_ids
        )
    except (IndexError, ValueError):
        return False

    if top_priority != COMMA_PRIORITY:
        return False
    if line.mode.magic_trailing_comma and has_trailing_comma:
        return True
    # Always explode imports (and parenthesized atoms).
    return opening_bracket.parent.type in {syms.atom, syms.import_from}
+
+
def generate_trailers_to_omit(line: Line, line_length: int) -> Iterator[set[LeafID]]:
    """Generate sets of closing bracket IDs that should be omitted in a RHS.

    Brackets can be omitted if the entire trailer up to and including
    a preceding closing bracket fits in one line.

    Yielded sets are cumulative (contain results of previous yields, too). First
    set is empty, unless the line should explode, in which case bracket pairs until
    the one that needs to explode are omitted.
    """

    omit: set[LeafID] = set()
    if not line.magic_trailing_comma:
        yield omit

    # Running width of the scanned suffix, starting with the indentation.
    length = 4 * line.depth
    opening_bracket: Leaf | None = None
    closing_bracket: Leaf | None = None
    inner_brackets: set[LeafID] = set()
    # Scan the line right-to-left, accumulating width as we go.
    for index, leaf, leaf_length in line.enumerate_with_length(is_reversed=True):
        length += leaf_length
        if length > line_length:
            break

        has_inline_comment = leaf_length > len(leaf.value) + len(leaf.prefix)
        if leaf.type == STANDALONE_COMMENT or has_inline_comment:
            break

        if opening_bracket:
            if leaf is opening_bracket:
                opening_bracket = None
            elif leaf.type in CLOSING_BRACKETS:
                prev = line.leaves[index - 1] if index > 0 else None
                if (
                    prev
                    and prev.type == token.COMMA
                    and leaf.opening_bracket is not None
                    and not is_one_sequence_between(
                        leaf.opening_bracket, leaf, line.leaves
                    )
                ):
                    # Never omit bracket pairs with trailing commas.
                    # We need to explode on those.
                    break

                inner_brackets.add(id(leaf))
        elif leaf.type in CLOSING_BRACKETS:
            prev = line.leaves[index - 1] if index > 0 else None
            if prev and prev.type in OPENING_BRACKETS:
                # Empty brackets would fail a split so treat them as "inner"
                # brackets (i.e. only add them to the `omit` set if another
                # pair of brackets was good enough).
                inner_brackets.add(id(leaf))
                continue

            if closing_bracket:
                # A further-right pair already qualified; commit it (and its
                # inner brackets) to the cumulative omit set.
                omit.add(id(closing_bracket))
                omit.update(inner_brackets)
                inner_brackets.clear()
                yield omit

            if (
                prev
                and prev.type == token.COMMA
                and leaf.opening_bracket is not None
                and not is_one_sequence_between(leaf.opening_bracket, leaf, line.leaves)
            ):
                # Never omit bracket pairs with trailing commas.
                # We need to explode on those.
                break

            if leaf.value:
                opening_bracket = leaf.opening_bracket
                closing_bracket = leaf
+
+
def run_transformer(
    line: Line,
    transform: Transformer,
    mode: Mode,
    features: Collection[Feature],
    *,
    line_str: str = "",
) -> list[Line]:
    """Apply `transform` to `line`, recursively formatting the resulting lines.

    Raises CannotTransform when the transformer returns `line` unchanged. For
    right-hand splits, a second attempt with forced optional parentheses is
    tried and used when all of its output lines fit the line length.
    """
    if not line_str:
        line_str = line_to_string(line)
    result: list[Line] = []
    for transformed_line in transform(line, features, mode):
        if str(transformed_line).strip("\n") == line_str:
            raise CannotTransform("Line transformer returned an unchanged result")

        result.extend(transform_line(transformed_line, mode=mode, features=features))

    # Decide whether a second opinion with forced optional parens is worth it;
    # any of the conditions below means we keep the first result as-is.
    features_set = set(features)
    if (
        Feature.FORCE_OPTIONAL_PARENTHESES in features_set
        or transform.__class__.__name__ != "rhs"
        or not line.bracket_tracker.invisible
        or any(bracket.value for bracket in line.bracket_tracker.invisible)
        or line.contains_multiline_strings()
        or result[0].contains_uncollapsable_type_comments()
        or result[0].contains_unsplittable_type_ignore()
        or is_line_short_enough(result[0], mode=mode)
        # If any leaves have no parents (which _can_ occur since
        # `transform(line)` potentially destroys the line's underlying node
        # structure), then we can't proceed. Doing so would cause the below
        # call to `append_leaves()` to fail.
        or any(leaf.parent is None for leaf in line.leaves)
    ):
        return result

    line_copy = line.clone()
    append_leaves(line_copy, line, line.leaves)
    features_fop = features_set | {Feature.FORCE_OPTIONAL_PARENTHESES}
    second_opinion = run_transformer(
        line_copy, transform, mode, features_fop, line_str=line_str
    )
    # Prefer the forced-parens result only when every emitted line fits.
    if all(is_line_short_enough(ln, mode=mode) for ln in second_opinion):
        result = second_opinion
    return result
diff --git a/py311/lib/python3.11/site-packages/black/lines.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/lines.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..cc4c66ed73c4687178af12ff648e7085608f6521
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/lines.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/lines.py b/py311/lib/python3.11/site-packages/black/lines.py
new file mode 100644
index 0000000000000000000000000000000000000000..09fce3a193d62b67be5d2480166d46270cd9dbe7
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/lines.py
@@ -0,0 +1,1101 @@
+import itertools
+import math
+from collections.abc import Callable, Iterator, Sequence
+from dataclasses import dataclass, field
+from typing import Optional, TypeVar, Union, cast
+
+from black.brackets import COMMA_PRIORITY, DOT_PRIORITY, BracketTracker
+from black.mode import Mode, Preview
+from black.nodes import (
+ BRACKETS,
+ CLOSING_BRACKETS,
+ OPENING_BRACKETS,
+ STANDALONE_COMMENT,
+ TEST_DESCENDANTS,
+ child_towards,
+ is_docstring,
+ is_import,
+ is_multiline_string,
+ is_one_sequence_between,
+ is_type_comment,
+ is_type_ignore_comment,
+ is_with_or_async_with_stmt,
+ make_simple_prefix,
+ replace_child,
+ syms,
+ whitespace,
+)
+from black.strings import str_width
+from blib2to3.pgen2 import token
+from blib2to3.pytree import Leaf, Node
+
# Type aliases used throughout this module.
T = TypeVar("T")  # generic element type for enumerate_reversed()
Index = int  # position of a leaf within Line.leaves
LeafID = int  # id() of a Leaf; used as dict key in Line.comments
LN = Union[Leaf, Node]  # any node of the blib2to3 parse tree
+
+
@dataclass
class Line:
    """Holds leaves and comments. Can be printed with `str(line)`."""

    # Formatting configuration; excluded from repr to keep output readable.
    mode: Mode = field(repr=False)
    # Indentation depth in indent levels (not characters).
    depth: int = 0
    leaves: list[Leaf] = field(default_factory=list)
    # keys ordered like `leaves`; maps id(leaf) -> inline comments after it
    comments: dict[LeafID, list[Leaf]] = field(default_factory=dict)
    bracket_tracker: BracketTracker = field(default_factory=BracketTracker)
    # True when this line lives inside brackets of an enclosing logical line.
    inside_brackets: bool = False
    should_split_rhs: bool = False
    # The trailing comma leaf that forces an explode, if one was recorded.
    magic_trailing_comma: Leaf | None = None
+
    def append(
        self, leaf: Leaf, preformatted: bool = False, track_bracket: bool = False
    ) -> None:
        """Add a new `leaf` to the end of the line.

        Unless `preformatted` is True, the `leaf` will receive a new consistent
        whitespace prefix and metadata applied by :class:`BracketTracker`.
        Trailing commas are maybe removed, unpacked for loop variables are
        demoted from being delimiters.

        Inline comments are put aside.
        """
        # Whitespace-only leaves are dropped, except brackets and (t/f)string
        # middles, whose emptiness is significant.
        has_value = (
            leaf.type in BRACKETS
            # empty fstring and tstring middles must not be truncated
            or leaf.type in (token.FSTRING_MIDDLE, token.TSTRING_MIDDLE)
            or bool(leaf.value.strip())
        )
        if not has_value:
            return

        if leaf.type == token.COLON and self.is_class_paren_empty:
            # Drop the redundant empty "()" after a class name.
            del self.leaves[-2:]
        if self.leaves and not preformatted:
            # Note: at this point leaf.prefix should be empty except for
            # imports, for which we only preserve newlines.
            leaf.prefix += whitespace(
                leaf,
                complex_subscript=self.is_complex_subscript(leaf),
                mode=self.mode,
            )
        if self.inside_brackets or not preformatted or track_bracket:
            self.bracket_tracker.mark(leaf)
            if self.mode.magic_trailing_comma:
                if self.has_magic_trailing_comma(leaf):
                    self.magic_trailing_comma = leaf
            elif self.has_magic_trailing_comma(leaf):
                self.remove_trailing_comma()
        if not self.append_comment(leaf):
            self.leaves.append(leaf)
+
    def append_safe(self, leaf: Leaf, preformatted: bool = False) -> None:
        """Like :func:`append()` but disallow invalid standalone comment structure.

        Raises ValueError when any `leaf` is appended after a standalone comment
        or when a standalone comment is not the first leaf on the line.
        """
        # Only enforce at bracket depth 0 (or inside open for/lambda); inside
        # other brackets standalone comments are handled by append_comment().
        if (
            self.bracket_tracker.depth == 0
            or self.bracket_tracker.any_open_for_or_lambda()
        ):
            if self.is_comment:
                raise ValueError("cannot append to standalone comments")

            if self.leaves and leaf.type == STANDALONE_COMMENT:
                raise ValueError(
                    "cannot append standalone comments to a populated line"
                )

        self.append(leaf, preformatted=preformatted)
+
+ @property
+ def is_comment(self) -> bool:
+ """Is this line a standalone comment?"""
+ return len(self.leaves) == 1 and self.leaves[0].type == STANDALONE_COMMENT
+
+ @property
+ def is_decorator(self) -> bool:
+ """Is this line a decorator?"""
+ return bool(self) and self.leaves[0].type == token.AT
+
    @property
    def is_import(self) -> bool:
        """Is this an import line?

        Delegates to `black.nodes.is_import` on the first leaf.
        """
        return bool(self) and is_import(self.leaves[0])
+
    @property
    def is_with_or_async_with_stmt(self) -> bool:
        """Is this a `with` or `async with` statement line?"""
        return bool(self) and is_with_or_async_with_stmt(self.leaves[0])
+
+ @property
+ def is_class(self) -> bool:
+ """Is this line a class definition?"""
+ return (
+ bool(self)
+ and self.leaves[0].type == token.NAME
+ and self.leaves[0].value == "class"
+ )
+
    @property
    def is_stub_class(self) -> bool:
        """Is this line a class definition with a body consisting only of "..."?"""
        # An ellipsis body shows up as three consecutive DOT leaves at the end.
        return self.is_class and self.leaves[-3:] == [
            Leaf(token.DOT, ".") for _ in range(3)
        ]
+
+ @property
+ def is_def(self) -> bool:
+ """Is this a function definition? (Also returns True for async defs.)"""
+ try:
+ first_leaf = self.leaves[0]
+ except IndexError:
+ return False
+
+ try:
+ second_leaf: Leaf | None = self.leaves[1]
+ except IndexError:
+ second_leaf = None
+ return (first_leaf.type == token.NAME and first_leaf.value == "def") or (
+ first_leaf.type == token.ASYNC
+ and second_leaf is not None
+ and second_leaf.type == token.NAME
+ and second_leaf.value == "def"
+ )
+
    @property
    def is_stub_def(self) -> bool:
        """Is this line a function definition with a body consisting only of "..."?"""
        # Matches a trailing colon plus three DOT leaves (the ellipsis body).
        return self.is_def and self.leaves[-4:] == [Leaf(token.COLON, ":")] + [
            Leaf(token.DOT, ".") for _ in range(3)
        ]
+
    @property
    def is_class_paren_empty(self) -> bool:
        """Is this a class with no base classes but using parentheses?

        Those are unnecessary and should be removed.
        """
        # Exactly: NAME("class"), NAME(<name>), "(", ")".
        return (
            bool(self)
            and len(self.leaves) == 4
            and self.is_class
            and self.leaves[2].type == token.LPAR
            and self.leaves[2].value == "("
            and self.leaves[3].type == token.RPAR
            and self.leaves[3].value == ")"
        )
+
+ @property
+ def _is_triple_quoted_string(self) -> bool:
+ """Is the line a triple quoted string?"""
+ if not self or self.leaves[0].type != token.STRING:
+ return False
+ value = self.leaves[0].value
+ if value.startswith(('"""', "'''")):
+ return True
+ if value.startswith(("r'''", 'r"""', "R'''", 'R"""')):
+ return True
+ return False
+
    @property
    def is_docstring(self) -> bool:
        """Is the line a docstring?

        Delegates to `black.nodes.is_docstring` on the first leaf.
        """
        return bool(self) and is_docstring(self.leaves[0])
+
+ @property
+ def is_chained_assignment(self) -> bool:
+ """Is the line a chained assignment"""
+ return [leaf.type for leaf in self.leaves].count(token.EQUAL) > 1
+
+ @property
+ def opens_block(self) -> bool:
+ """Does this line open a new level of indentation."""
+ if len(self.leaves) == 0:
+ return False
+ return self.leaves[-1].type == token.COLON
+
    def is_fmt_pass_converted(
        self, *, first_leaf_matches: Callable[[Leaf], bool] | None = None
    ) -> bool:
        """Is this line converted from fmt off/skip code?

        If first_leaf_matches is not None, it only returns True if the first
        leaf of converted code matches.
        """
        # Converted code is represented as a single STANDALONE_COMMENT leaf
        # carrying a reference to the original first leaf.
        if len(self.leaves) != 1:
            return False
        leaf = self.leaves[0]
        if (
            leaf.type != STANDALONE_COMMENT
            or leaf.fmt_pass_converted_first_leaf is None
        ):
            return False
        return first_leaf_matches is None or first_leaf_matches(
            leaf.fmt_pass_converted_first_leaf
        )
+
+ def contains_standalone_comments(self) -> bool:
+ """If so, needs to be split before emitting."""
+ for leaf in self.leaves:
+ if leaf.type == STANDALONE_COMMENT:
+ return True
+
+ return False
+
    def contains_implicit_multiline_string_with_comments(self) -> bool:
        """Check if we have an implicit multiline string with comments on the line"""
        # Implicitly concatenated strings appear as a run of >1 adjacent
        # STRING leaves; any inline comment between them makes this True.
        for leaf_type, leaf_group_iterator in itertools.groupby(
            self.leaves, lambda leaf: leaf.type
        ):
            if leaf_type != token.STRING:
                continue
            leaf_list = list(leaf_group_iterator)
            if len(leaf_list) == 1:
                continue
            for leaf in leaf_list:
                if self.comments_after(leaf):
                    return True
        return False
+
    def contains_uncollapsable_type_comments(self) -> bool:
        """Return True if joining this line would misplace a type comment."""
        ignored_ids = set()
        try:
            last_leaf = self.leaves[-1]
            ignored_ids.add(id(last_leaf))
            if last_leaf.type == token.COMMA or (
                last_leaf.type == token.RPAR and not last_leaf.value
            ):
                # When trailing commas or optional parens are inserted by Black for
                # consistency, comments after the previous last element are not moved
                # (they don't have to, rendering will still be correct). So we ignore
                # trailing commas and invisible.
                last_leaf = self.leaves[-2]
                ignored_ids.add(id(last_leaf))
        except IndexError:
            return False

        # A type comment is uncollapsable if it is attached to a leaf
        # that isn't at the end of the line (since that could cause it
        # to get associated to a different argument) or if there are
        # comments before it (since that could cause it to get hidden
        # behind a comment).
        comment_seen = False
        for leaf_id, comments in self.comments.items():
            for comment in comments:
                if is_type_comment(comment, mode=self.mode):
                    if comment_seen or (
                        not is_type_ignore_comment(comment, mode=self.mode)
                        and leaf_id not in ignored_ids
                    ):
                        return True

                comment_seen = True

        return False
+
    def contains_unsplittable_type_ignore(self) -> bool:
        """Return True if a trailing `# type: ignore` forbids splitting this line."""
        if not self.leaves:
            return False

        # If a 'type: ignore' is attached to the end of a line, we
        # can't split the line, because we can't know which of the
        # subexpressions the ignore was meant to apply to.
        #
        # We only want this to apply to actual physical lines from the
        # original source, though: we don't want the presence of a
        # 'type: ignore' at the end of a multiline expression to
        # justify pushing it all onto one line. Thus we
        # (unfortunately) need to check the actual source lines and
        # only report an unsplittable 'type: ignore' if this line was
        # one line in the original code.

        # Grab the first and last line numbers, skipping generated leaves
        # (generated leaves have lineno == 0).
        first_line = next((leaf.lineno for leaf in self.leaves if leaf.lineno != 0), 0)
        last_line = next(
            (leaf.lineno for leaf in reversed(self.leaves) if leaf.lineno != 0), 0
        )

        if first_line == last_line:
            # We look at the last two leaves since a comma or an
            # invisible paren could have been added at the end of the
            # line.
            for node in self.leaves[-2:]:
                for comment in self.comments.get(id(node), []):
                    if is_type_ignore_comment(comment, mode=self.mode):
                        return True

        return False
+
+ def contains_multiline_strings(self) -> bool:
+ return any(is_multiline_string(leaf) for leaf in self.leaves)
+
    def has_magic_trailing_comma(self, closing: Leaf) -> bool:
        """Return True if we have a magic trailing comma, that is when:
        - there's a trailing comma here
        - it's not from single-element square bracket indexing
        - it's not a one-tuple
        """
        if not (
            closing.type in CLOSING_BRACKETS
            and self.leaves
            and self.leaves[-1].type == token.COMMA
        ):
            return False

        if closing.type == token.RBRACE:
            # Dicts/sets: a trailing comma is always magic.
            return True

        if closing.type == token.RSQB:
            # Single-element subscript (indexing) — the comma is syntax, not magic.
            if (
                closing.parent is not None
                and closing.parent.type == syms.trailer
                and closing.opening_bracket is not None
                and is_one_sequence_between(
                    closing.opening_bracket,
                    closing,
                    self.leaves,
                    brackets=(token.LSQB, token.RSQB),
                )
            ):
                assert closing.prev_sibling is not None
                assert closing.prev_sibling.type == syms.subscriptlist
                return False

            return True

        if self.is_import:
            return True

        # Parentheses: a one-tuple's comma is semantic, everything else is magic.
        if closing.opening_bracket is not None and not is_one_sequence_between(
            closing.opening_bracket, closing, self.leaves
        ):
            return True

        return False
+
    def append_comment(self, comment: Leaf) -> bool:
        """Add an inline or standalone comment to the line.

        Returns True if the comment was stored in `self.comments`; False means
        the caller should append it as a regular leaf instead.
        """
        if (
            comment.type == STANDALONE_COMMENT
            and self.bracket_tracker.any_open_brackets()
        ):
            comment.prefix = ""
            return False

        if comment.type != token.COMMENT:
            return False

        if not self.leaves:
            # A comment with nothing before it becomes a standalone comment leaf.
            comment.type = STANDALONE_COMMENT
            comment.prefix = ""
            return False

        last_leaf = self.leaves[-1]
        if (
            last_leaf.type == token.RPAR
            and not last_leaf.value
            and last_leaf.parent
            and len(list(last_leaf.parent.leaves())) <= 3
            and not is_type_comment(comment, mode=self.mode)
        ):
            # Comments on an optional parens wrapping a single leaf should belong to
            # the wrapped node except if it's a type comment. Pinning the comment like
            # this avoids unstable formatting caused by comment migration.
            if len(self.leaves) < 2:
                comment.type = STANDALONE_COMMENT
                comment.prefix = ""
                return False

            last_leaf = self.leaves[-2]

        self.comments.setdefault(id(last_leaf), []).append(comment)
        return True
+
+ def comments_after(self, leaf: Leaf) -> list[Leaf]:
+ """Generate comments that should appear directly after `leaf`."""
+ return self.comments.get(id(leaf), [])
+
    def remove_trailing_comma(self) -> None:
        """Remove the trailing comma and moves the comments attached to it.

        Assumes the last leaf is a COMMA; its comments are re-attached to the
        new last leaf so they are not lost.
        """
        trailing_comma = self.leaves.pop()
        trailing_comma_comments = self.comments.pop(id(trailing_comma), [])
        self.comments.setdefault(id(self.leaves[-1]), []).extend(
            trailing_comma_comments
        )
+
    def is_complex_subscript(self, leaf: Leaf) -> bool:
        """Return True iff `leaf` is part of a slice with non-trivial exprs."""
        open_lsqb = self.bracket_tracker.get_open_lsqb()
        if open_lsqb is None:
            return False

        subscript_start = open_lsqb.next_sibling

        if isinstance(subscript_start, Node):
            if subscript_start.type == syms.listmaker:
                # A list literal, not a subscript.
                return False

            if subscript_start.type == syms.subscriptlist:
                # Narrow to the specific subscript containing `leaf`.
                subscript_start = child_towards(subscript_start, leaf)

        # "Complex" means some descendant is a test-level expression.
        return subscript_start is not None and any(
            n.type in TEST_DESCENDANTS for n in subscript_start.pre_order()
        )
+
    def enumerate_with_length(
        self, is_reversed: bool = False
    ) -> Iterator[tuple[Index, Leaf, int]]:
        """Return an enumeration of leaves with their length.

        Stops prematurely on multiline strings and standalone comments.
        """
        # The cast unifies enumerate() and enumerate_reversed() under one type.
        op = cast(
            Callable[[Sequence[Leaf]], Iterator[tuple[Index, Leaf]]],
            enumerate_reversed if is_reversed else enumerate,
        )
        for index, leaf in op(self.leaves):
            length = len(leaf.prefix) + len(leaf.value)
            if "\n" in leaf.value:
                return  # Multiline strings, we can't continue.

            for comment in self.comments_after(leaf):
                length += len(comment.value)

            yield index, leaf, length
+
+ def clone(self) -> "Line":
+ return Line(
+ mode=self.mode,
+ depth=self.depth,
+ inside_brackets=self.inside_brackets,
+ should_split_rhs=self.should_split_rhs,
+ magic_trailing_comma=self.magic_trailing_comma,
+ )
+
+ def __str__(self) -> str:
+ """Render the line."""
+ if not self:
+ return "\n"
+
+ indent = " " * self.depth
+ leaves = iter(self.leaves)
+ first = next(leaves)
+ res = f"{first.prefix}{indent}{first.value}"
+ res += "".join(str(leaf) for leaf in leaves)
+ comments_iter = itertools.chain.from_iterable(self.comments.values())
+ comments = [str(comment) for comment in comments_iter]
+ res += "".join(comments)
+
+ return res + "\n"
+
+ def __bool__(self) -> bool:
+ """Return True if the line has leaves or comments."""
+ return bool(self.leaves or self.comments)
+
+
@dataclass
class RHSResult:
    """Intermediate split result from a right hand split."""

    # Everything up to and including the opening bracket.
    head: Line
    # Contents between the brackets.
    body: Line
    # The closing bracket and everything after it.
    tail: Line
    opening_bracket: Leaf
    closing_bracket: Leaf
+
+
@dataclass
class LinesBlock:
    """Class that holds information about a block of formatted lines.

    This is introduced so that the EmptyLineTracker can look behind the standalone
    comments and adjust their empty lines for class or def lines.
    """

    mode: Mode
    previous_block: Optional["LinesBlock"]
    original_line: Line
    # Number of empty lines emitted before/after the content.
    before: int = 0
    content_lines: list[str] = field(default_factory=list)
    after: int = 0
    # True when the original source had a form feed before this block.
    form_feed: bool = False

    def all_lines(self) -> list[str]:
        """Render the block: leading empty lines, content, trailing empty lines."""
        empty_line = str(Line(mode=self.mode))
        prefix = make_simple_prefix(self.before, self.form_feed, empty_line)
        return [prefix] + self.content_lines + [empty_line * self.after]
+
+
@dataclass
class EmptyLineTracker:
    """Provides a stateful method that returns the number of potential extra
    empty lines needed before and after the currently processed line.

    Note: this tracker works on lines that haven't been split yet. It assumes
    the prefix of the first leaf consists of optional newlines. Those newlines
    are consumed by `maybe_empty_lines()` and included in the computation.
    """

    mode: Mode
    previous_line: Line | None = None
    previous_block: LinesBlock | None = None
    # Stack of enclosing def/class lines, innermost last.
    previous_defs: list[Line] = field(default_factory=list)
    # The comment block that leads a def/class and should move with it.
    semantic_leading_comment: LinesBlock | None = None
+
    def maybe_empty_lines(self, current_line: Line) -> LinesBlock:
        """Return the number of extra empty lines before and after the `current_line`.

        This is for separating `def`, `async def` and `class` with extra empty
        lines (two on module-level).
        """
        form_feed = (
            current_line.depth == 0
            and bool(current_line.leaves)
            and "\f\n" in current_line.leaves[0].prefix
        )
        before, after = self._maybe_empty_lines(current_line)
        # Lines already emitted after the previous block count toward `before`.
        previous_after = self.previous_block.after if self.previous_block else 0
        before = max(0, before - previous_after)
        if Preview.fix_module_docstring_detection in self.mode:
            # Always have one empty line after a module docstring
            if self._line_is_module_docstring(current_line):
                before = 1
        else:
            if (
                # Always have one empty line after a module docstring
                self.previous_block
                and self.previous_block.previous_block is None
                and len(self.previous_block.original_line.leaves) == 1
                and self.previous_block.original_line.is_docstring
                and not (current_line.is_class or current_line.is_def)
            ):
                before = 1

        block = LinesBlock(
            mode=self.mode,
            previous_block=self.previous_block,
            original_line=current_line,
            before=before,
            after=after,
            form_feed=form_feed,
        )

        # Maintain the semantic_leading_comment state.
        if current_line.is_comment:
            if self.previous_line is None or (
                not self.previous_line.is_decorator
                # `or before` means this comment already has an empty line before
                and (not self.previous_line.is_comment or before)
                and (self.semantic_leading_comment is None or before)
            ):
                self.semantic_leading_comment = block
        # `or before` means this decorator already has an empty line before
        elif not current_line.is_decorator or before:
            self.semantic_leading_comment = None

        self.previous_line = current_line
        self.previous_block = block
        return block
+
    def _line_is_module_docstring(self, current_line: Line) -> bool:
        """Return True if the previous block is the module docstring.

        Comments are allowed before the docstring; any other preceding block
        disqualifies it.
        """
        previous_block = self.previous_block
        if not previous_block:
            return False
        if (
            len(previous_block.original_line.leaves) != 1
            or not previous_block.original_line.is_docstring
            or current_line.is_class
            or current_line.is_def
        ):
            return False
        # Walk back: everything before the docstring must be a comment.
        while previous_block := previous_block.previous_block:
            if not previous_block.original_line.is_comment:
                return False
        return True
+
    def _maybe_empty_lines(self, current_line: Line) -> tuple[int, int]:
        """Compute (before, after) empty-line counts for `current_line`.

        Consumes newlines from the first leaf's prefix; mutates
        `self.previous_defs` only — the remainder is pure computation.
        """
        max_allowed = 1
        if current_line.depth == 0:
            # Two blank lines are allowed at module level (one in stub files).
            max_allowed = 1 if self.mode.is_pyi else 2

        if current_line.leaves:
            # Consume the first leaf's extra newlines.
            first_leaf = current_line.leaves[0]
            before = first_leaf.prefix.count("\n")
            before = min(before, max_allowed)
            first_leaf.prefix = ""
        else:
            before = 0

        user_had_newline = bool(before)
        depth = current_line.depth

        # Mutate self.previous_defs, remainder of this function should be pure
        previous_def = None
        while self.previous_defs and self.previous_defs[-1].depth >= depth:
            previous_def = self.previous_defs.pop()
        if current_line.is_def or current_line.is_class:
            self.previous_defs.append(current_line)

        if self.previous_line is None:
            # Don't insert empty lines before the first line in the file.
            return 0, 0

        if current_line.is_docstring:
            if self.previous_line.is_class:
                return 0, 1
            if self.previous_line.opens_block and self.previous_line.is_def:
                return 0, 0

        if previous_def is not None:
            # We just left a def/class body at this depth.
            assert self.previous_line is not None
            if self.mode.is_pyi:
                if previous_def.is_class and not previous_def.is_stub_class:
                    before = 1
                elif depth and not current_line.is_def and self.previous_line.is_def:
                    # Empty lines between attributes and methods should be preserved.
                    before = 1 if user_had_newline else 0
                elif depth:
                    before = 0
                else:
                    before = 1
            else:
                if depth:
                    before = 1
                elif (
                    not depth
                    and previous_def.depth
                    and current_line.leaves[-1].type == token.COLON
                    and (
                        current_line.leaves[0].value
                        not in ("with", "try", "for", "while", "if", "match")
                    )
                ):
                    # We shouldn't add two newlines between an indented function and
                    # a dependent non-indented clause. This is to avoid issues with
                    # conditional function definitions that are technically top-level
                    # and therefore get two trailing newlines, but look weird and
                    # inconsistent when they're followed by elif, else, etc. This is
                    # worse because these functions only get *one* preceding newline
                    # already.
                    before = 1
                else:
                    before = 2

        if current_line.is_decorator or current_line.is_def or current_line.is_class:
            return self._maybe_empty_lines_for_class_or_def(
                current_line, before, user_had_newline
            )

        if (
            self.previous_line.is_import
            and self.previous_line.depth == 0
            and current_line.depth == 0
            and not current_line.is_import
            and not current_line.is_fmt_pass_converted(first_leaf_matches=is_import)
            and Preview.always_one_newline_after_import in self.mode
        ):
            return 1, 0

        if (
            self.previous_line.is_import
            and not current_line.is_import
            and not current_line.is_fmt_pass_converted(first_leaf_matches=is_import)
            and depth == self.previous_line.depth
        ):
            return (before or 1), 0

        return before, 0
+
    def _maybe_empty_lines_for_class_or_def(  # noqa: C901
        self, current_line: Line, before: int, user_had_newline: bool
    ) -> tuple[int, int]:
        """Compute empty lines before a decorator, def or class line."""
        assert self.previous_line is not None

        if self.previous_line.is_decorator:
            if self.mode.is_pyi and current_line.is_stub_class:
                # Insert an empty line after a decorated stub class
                return 0, 1
            # Never separate a decorator from what it decorates.
            return 0, 0

        if self.previous_line.depth < current_line.depth and (
            self.previous_line.is_class or self.previous_line.is_def
        ):
            # First line inside a def/class body.
            if self.mode.is_pyi:
                return 0, 0
            return 1 if user_had_newline else 0, 0

        comment_to_add_newlines: LinesBlock | None = None
        if (
            self.previous_line.is_comment
            and self.previous_line.depth == current_line.depth
            and before == 0
        ):
            slc = self.semantic_leading_comment
            if (
                slc is not None
                and slc.previous_block is not None
                and not slc.previous_block.original_line.is_class
                and not slc.previous_block.original_line.opens_block
                and slc.before <= 1
            ):
                # The blank lines belong before the leading comment, not here.
                comment_to_add_newlines = slc
            else:
                return 0, 0

        if self.mode.is_pyi:
            if current_line.is_class or self.previous_line.is_class:
                if self.previous_line.depth < current_line.depth:
                    newlines = 0
                elif self.previous_line.depth > current_line.depth:
                    newlines = 1
                elif current_line.is_stub_class and self.previous_line.is_stub_class:
                    # No blank line between classes with an empty body
                    newlines = 0
                else:
                    newlines = 1
            # Don't inspect the previous line if it's part of the body of the previous
            # statement in the same level, we always want a blank line if there's
            # something with a body preceding.
            elif self.previous_line.depth > current_line.depth:
                newlines = 1
            elif (
                current_line.is_def or current_line.is_decorator
            ) and not self.previous_line.is_def:
                if current_line.depth:
                    # In classes empty lines between attributes and methods should
                    # be preserved.
                    newlines = min(1, before)
                else:
                    # Blank line between a block of functions (maybe with preceding
                    # decorators) and a block of non-functions
                    newlines = 1
            else:
                newlines = 0
        else:
            newlines = 1 if current_line.depth else 2
            # If a user has left no space after a dummy implementation, don't insert
            # new lines. This is useful for instance for @overload or Protocols.
            if self.previous_line.is_stub_def and not user_had_newline:
                newlines = 0
        if comment_to_add_newlines is not None:
            previous_block = comment_to_add_newlines.previous_block
            if previous_block is not None:
                comment_to_add_newlines.before = (
                    max(comment_to_add_newlines.before, newlines) - previous_block.after
                )
                newlines = 0
        return newlines, 0
+
+
def enumerate_reversed(sequence: Sequence[T]) -> Iterator[tuple[Index, T]]:
    """Like `reversed(enumerate(sequence))` if that were possible."""
    # Pair descending indices with the elements in reverse order.
    yield from zip(range(len(sequence) - 1, -1, -1), reversed(sequence))
+
+
def append_leaves(
    new_line: Line, old_line: Line, leaves: list[Leaf], preformatted: bool = False
) -> None:
    """
    Append leaves (taken from @old_line) to @new_line, making sure to fix the
    underlying Node structure where appropriate.

    All of the leaves in @leaves are duplicated. The duplicates are then
    appended to @new_line and used to replace their originals in the underlying
    Node structure. Any comments attached to the old leaves are reattached to
    the new leaves.

    Pre-conditions:
        set(@leaves) is a subset of set(@old_line.leaves).
    """
    for old_leaf in leaves:
        # Duplicate first, then swap the copy into the tree in place.
        new_leaf = Leaf(old_leaf.type, old_leaf.value)
        replace_child(old_leaf, new_leaf)
        new_line.append(new_leaf, preformatted=preformatted)

        # Comments are appended preformatted so their prefixes are kept intact.
        for comment_leaf in old_line.comments_after(old_leaf):
            new_line.append(comment_leaf, preformatted=True)
+
+
def is_line_short_enough(  # noqa: C901
    line: Line, *, mode: Mode, line_str: str = ""
) -> bool:
    """For non-multiline strings, return True if `line` is no longer than `line_length`.
    For multiline strings, looks at the context around `line` to determine
    if it should be inlined or split up.
    Uses the provided `line_str` rendering, if any, otherwise computes a new one.
    """
    if not line_str:
        line_str = line_to_string(line)

    if Preview.multiline_string_handling not in mode:
        # Legacy behavior: any embedded newline or standalone comment is too long.
        return (
            str_width(line_str) <= mode.line_length
            and "\n" not in line_str  # multiline strings
            and not line.contains_standalone_comments()
        )

    if line.contains_standalone_comments():
        return False
    if "\n" not in line_str:
        # No multiline strings (MLS) present
        return str_width(line_str) <= mode.line_length

    # Only the first and last physical lines need to respect the line length.
    first, *_, last = line_str.split("\n")
    if str_width(first) > mode.line_length or str_width(last) > mode.line_length:
        return False

    # Traverse the AST to examine the context of the multiline string (MLS),
    # tracking aspects such as depth and comma existence,
    # to determine whether to split the MLS or keep it together.
    # Depth (which is based on the existing bracket_depth concept)
    # is needed to determine nesting level of the MLS.
    # Includes special case for trailing commas.
    commas: list[int] = []  # tracks number of commas per depth level
    multiline_string: Leaf | None = None
    # store the leaves that contain parts of the MLS
    multiline_string_contexts: list[LN] = []

    max_level_to_update: int | float = math.inf  # track the depth of the MLS
    for i, leaf in enumerate(line.leaves):
        if max_level_to_update == math.inf:
            had_comma: int | None = None
            if leaf.bracket_depth + 1 > len(commas):
                commas.append(0)
            elif leaf.bracket_depth + 1 < len(commas):
                had_comma = commas.pop()
            if (
                had_comma is not None
                and multiline_string is not None
                and multiline_string.bracket_depth == leaf.bracket_depth + 1
            ):
                # Have left the level with the MLS, stop tracking commas
                max_level_to_update = leaf.bracket_depth
                if had_comma > 0:
                    # MLS was in parens with at least one comma - force split
                    return False

        if leaf.bracket_depth <= max_level_to_update and leaf.type == token.COMMA:
            # Inside brackets, ignore trailing comma
            # directly after MLS/MLS-containing expression
            ignore_ctxs: list[LN | None] = [None]
            ignore_ctxs += multiline_string_contexts
            if (line.inside_brackets or leaf.bracket_depth > 0) and (
                i != len(line.leaves) - 1 or leaf.prev_sibling not in ignore_ctxs
            ):
                commas[leaf.bracket_depth] += 1
        if max_level_to_update != math.inf:
            max_level_to_update = min(max_level_to_update, leaf.bracket_depth)

        if is_multiline_string(leaf):
            if leaf.parent and (
                leaf.parent.type == syms.test
                or (leaf.parent.parent and leaf.parent.parent.type == syms.dictsetmaker)
            ):
                # Keep ternary and dictionary values parenthesized
                return False
            if len(multiline_string_contexts) > 0:
                # >1 multiline string cannot fit on a single line - force split
                return False
            multiline_string = leaf
            ctx: LN = leaf
            # fetch the leaf components of the MLS in the AST
            while str(ctx) in line_str:
                multiline_string_contexts.append(ctx)
                if ctx.parent is None:
                    break
                ctx = ctx.parent

    # May not have a triple-quoted multiline string at all,
    # in case of a regular string with embedded newlines and line continuations
    if len(multiline_string_contexts) == 0:
        return True

    return all(val == 0 for val in commas)
+
+
def can_be_split(line: Line) -> bool:
    """Return False if the line cannot be split *for sure*.

    This is not an exhaustive search but a cheap heuristic that we can use to
    avoid some unfortunate formattings (mostly around wrapping unsplittable code
    in unnecessary parentheses).
    """
    leaves = line.leaves
    if len(leaves) < 2:
        return False

    if leaves[0].type == token.STRING and leaves[1].type == token.DOT:
        # A "...".method(...).method(...) chain: walk right-to-left and verify
        # it consists only of names, dots, and bracketed calls.
        call_count = 0
        dot_count = 0
        # `next` is the leaf immediately to the right of the one being examined.
        next = leaves[-1]
        for leaf in leaves[-2::-1]:
            if leaf.type in OPENING_BRACKETS:
                if next.type not in CLOSING_BRACKETS:
                    return False

                call_count += 1
            elif leaf.type == token.DOT:
                dot_count += 1
            elif leaf.type == token.NAME:
                # A name must be followed by a dot or a call/subscript.
                if not (next.type == token.DOT or next.type in OPENING_BRACKETS):
                    return False

            elif leaf.type not in CLOSING_BRACKETS:
                return False

            if dot_count > 1 and call_count > 1:
                return False

            # Bug fix: advance the right-neighbor tracker. Previously `next`
            # stayed pinned to the last leaf, so the neighbor checks above
            # compared every leaf against the end of the line.
            next = leaf

    return True
+
+
def can_omit_invisible_parens(
    rhs: RHSResult,
    line_length: int,
) -> bool:
    """Does `rhs.body` have a shape safe to reformat without optional parens around it?

    Returns True for only a subset of potentially nice looking formattings but
    the point is to not return false positives that end up producing lines that
    are too long.
    """
    line = rhs.body

    # We need optional parens in order to split standalone comments to their own lines
    # if there are no nested parens around the standalone comments
    closing_bracket: Leaf | None = None
    for leaf in reversed(line.leaves):
        if closing_bracket and leaf is closing_bracket.opening_bracket:
            closing_bracket = None
        if leaf.type == STANDALONE_COMMENT and not closing_bracket:
            return False
        if (
            not closing_bracket
            and leaf.type in CLOSING_BRACKETS
            and leaf.opening_bracket in line.leaves
            and leaf.value
        ):
            closing_bracket = leaf

    bt = line.bracket_tracker
    if not bt.delimiters:
        # Without delimiters the optional parentheses are useless.
        return True

    max_priority = bt.max_delimiter_priority()
    delimiter_count = bt.delimiter_count_with_priority(max_priority)
    if delimiter_count > 1:
        # With more than one delimiter of a kind the optional parentheses read better.
        return False

    if delimiter_count == 1:
        if max_priority == COMMA_PRIORITY and rhs.head.is_with_or_async_with_stmt:
            # For two context manager with statements, the optional parentheses read
            # better. In this case, `rhs.body` is the context managers part of
            # the with statement. `rhs.head` is the `with (` part on the previous
            # line.
            return False
        # Otherwise it may also read better, but we don't do it today and requires
        # careful considerations for all possible cases. See
        # https://github.com/psf/black/issues/2156.

    if max_priority == DOT_PRIORITY:
        # A single stranded method call doesn't require optional parentheses.
        return True

    assert len(line.leaves) >= 2, "Stranded delimiter"

    # With a single delimiter, omit if the expression starts or ends with
    # a bracket.
    first = line.leaves[0]
    second = line.leaves[1]
    if first.type in OPENING_BRACKETS and second.type not in CLOSING_BRACKETS:
        if _can_omit_opening_paren(line, first=first, line_length=line_length):
            return True

        # Note: we are not returning False here because a line might have *both*
        # a leading opening bracket and a trailing closing bracket. If the
        # opening bracket doesn't match our rule, maybe the closing will.

    penultimate = line.leaves[-2]
    last = line.leaves[-1]

    if (
        last.type == token.RPAR
        or last.type == token.RBRACE
        or (
            # don't use indexing for omitting optional parentheses;
            # it looks weird
            last.type == token.RSQB
            and last.parent
            and last.parent.type != syms.trailer
        )
    ):
        if penultimate.type in OPENING_BRACKETS:
            # Empty brackets don't help.
            return False

        if is_multiline_string(first):
            # Additional wrapping of a multiline string in this situation is
            # unnecessary.
            return True

        if _can_omit_closing_paren(line, last=last, line_length=line_length):
            return True

    return False
+
+
def _can_omit_opening_paren(line: Line, *, first: Leaf, line_length: int) -> bool:
    """See `can_omit_invisible_parens`.

    Measures whether everything from the bracket matching `first` to the end
    of the line fits in `line_length` (or can be split on inner brackets).
    """
    remainder = False
    length = 4 * line.depth
    _index = -1
    for _index, leaf, leaf_length in line.enumerate_with_length():
        if leaf.type in CLOSING_BRACKETS and leaf.opening_bracket is first:
            remainder = True
        if remainder:
            length += leaf_length
            if length > line_length:
                break

            if leaf.type in OPENING_BRACKETS:
                # There are brackets we can further split on.
                remainder = False

    # NOTE: this `else` belongs to the `for` loop — it only runs when the loop
    # was not `break`ed out of, i.e. the length budget was never exceeded.
    else:
        # checked the entire string and line length wasn't exceeded
        if len(line.leaves) == _index + 1:
            return True

    return False
+
+
def _can_omit_closing_paren(line: Line, *, last: Leaf, line_length: int) -> bool:
    """See `can_omit_invisible_parens`."""
    running_length = 4 * line.depth
    found_other_brackets = False
    for _, leaf, leaf_length in line.enumerate_with_length():
        running_length += leaf_length
        if leaf is last.opening_bracket:
            # Reached the bracket matching `last`; decide right here.
            return found_other_brackets or running_length <= line_length
        if leaf.type in OPENING_BRACKETS:
            # There are brackets we can further split on.
            found_other_brackets = True

    return False
+
+
def line_to_string(line: Line) -> str:
    """Returns the string representation of @line.

    WARNING: This is known to be computationally expensive.
    """
    rendered = str(line)
    return rendered.strip("\n")
diff --git a/py311/lib/python3.11/site-packages/black/mode.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/mode.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..8e5b51fb8bd906ce7df9fb504340725ac3cfc0e8
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/mode.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/mode.py b/py311/lib/python3.11/site-packages/black/mode.py
new file mode 100644
index 0000000000000000000000000000000000000000..702f580e97963f2352c875fa41f9f00df02ec7d7
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/mode.py
@@ -0,0 +1,335 @@
+"""Data structures configuring Black behavior.
+
+Mostly around Python language feature support per version and Black configuration
+chosen by the user.
+"""
+
+from dataclasses import dataclass, field
+from enum import Enum, auto
+from hashlib import sha256
+from operator import attrgetter
+from typing import Final
+
+from black.const import DEFAULT_LINE_LENGTH
+
+
class TargetVersion(Enum):
    """Python 3 minor versions Black can target; the value is the minor number."""

    PY33 = 3
    PY34 = 4
    PY35 = 5
    PY36 = 6
    PY37 = 7
    PY38 = 8
    PY39 = 9
    PY310 = 10
    PY311 = 11
    PY312 = 12
    PY313 = 13
    PY314 = 14

    def pretty(self) -> str:
        """Render e.g. ``PY310`` as ``"Python 3.10"``."""
        assert self.name.startswith("PY")
        major, minor = self.name[2], self.name[3:]
        return f"Python {major}.{minor}"
+
+
class Feature(Enum):
    """Python language features Black detects or gates behavior on.

    Values are arbitrary but stable identifiers; FORCE_OPTIONAL_PARENTHESES
    and the ``__future__`` flags occupy the 50+ range.
    """

    F_STRINGS = 2
    NUMERIC_UNDERSCORES = 3
    TRAILING_COMMA_IN_CALL = 4
    TRAILING_COMMA_IN_DEF = 5
    # The following two feature-flags are mutually exclusive, and exactly one should be
    # set for every version of python.
    ASYNC_IDENTIFIERS = 6
    ASYNC_KEYWORDS = 7
    ASSIGNMENT_EXPRESSIONS = 8
    POS_ONLY_ARGUMENTS = 9
    RELAXED_DECORATORS = 10
    PATTERN_MATCHING = 11
    UNPACKING_ON_FLOW = 12
    ANN_ASSIGN_EXTENDED_RHS = 13
    EXCEPT_STAR = 14
    VARIADIC_GENERICS = 15
    DEBUG_F_STRINGS = 16
    PARENTHESIZED_CONTEXT_MANAGERS = 17
    TYPE_PARAMS = 18
    # FSTRING_PARSING = 19  # unused
    TYPE_PARAM_DEFAULTS = 20
    UNPARENTHESIZED_EXCEPT_TYPES = 21
    T_STRINGS = 22
    FORCE_OPTIONAL_PARENTHESES = 50

    # __future__ flags
    FUTURE_ANNOTATIONS = 51
+
+
# Maps a `from __future__ import <flag>` name to the Feature it enables.
FUTURE_FLAG_TO_FEATURE: Final = {
    "annotations": Feature.FUTURE_ANNOTATIONS,
}
+
+
# The set of Features available in each target version; queried through
# supports_feature(). Each version's set is (by inspection) a superset of the
# previous version's, but the sets are spelled out explicitly.
VERSION_TO_FEATURES: dict[TargetVersion, set[Feature]] = {
    TargetVersion.PY33: {Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY34: {Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY35: {Feature.TRAILING_COMMA_IN_CALL, Feature.ASYNC_IDENTIFIERS},
    TargetVersion.PY36: {
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_IDENTIFIERS,
    },
    TargetVersion.PY37: {
        Feature.F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
    },
    TargetVersion.PY38: {
        Feature.F_STRINGS,
        Feature.DEBUG_F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.POS_ONLY_ARGUMENTS,
        Feature.UNPACKING_ON_FLOW,
        Feature.ANN_ASSIGN_EXTENDED_RHS,
    },
    TargetVersion.PY39: {
        Feature.F_STRINGS,
        Feature.DEBUG_F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.RELAXED_DECORATORS,
        Feature.POS_ONLY_ARGUMENTS,
        Feature.UNPACKING_ON_FLOW,
        Feature.ANN_ASSIGN_EXTENDED_RHS,
        Feature.PARENTHESIZED_CONTEXT_MANAGERS,
    },
    TargetVersion.PY310: {
        Feature.F_STRINGS,
        Feature.DEBUG_F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.RELAXED_DECORATORS,
        Feature.POS_ONLY_ARGUMENTS,
        Feature.UNPACKING_ON_FLOW,
        Feature.ANN_ASSIGN_EXTENDED_RHS,
        Feature.PARENTHESIZED_CONTEXT_MANAGERS,
        Feature.PATTERN_MATCHING,
    },
    TargetVersion.PY311: {
        Feature.F_STRINGS,
        Feature.DEBUG_F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.RELAXED_DECORATORS,
        Feature.POS_ONLY_ARGUMENTS,
        Feature.UNPACKING_ON_FLOW,
        Feature.ANN_ASSIGN_EXTENDED_RHS,
        Feature.PARENTHESIZED_CONTEXT_MANAGERS,
        Feature.PATTERN_MATCHING,
        Feature.EXCEPT_STAR,
        Feature.VARIADIC_GENERICS,
    },
    TargetVersion.PY312: {
        Feature.F_STRINGS,
        Feature.DEBUG_F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.RELAXED_DECORATORS,
        Feature.POS_ONLY_ARGUMENTS,
        Feature.UNPACKING_ON_FLOW,
        Feature.ANN_ASSIGN_EXTENDED_RHS,
        Feature.PARENTHESIZED_CONTEXT_MANAGERS,
        Feature.PATTERN_MATCHING,
        Feature.EXCEPT_STAR,
        Feature.VARIADIC_GENERICS,
        Feature.TYPE_PARAMS,
    },
    TargetVersion.PY313: {
        Feature.F_STRINGS,
        Feature.DEBUG_F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.RELAXED_DECORATORS,
        Feature.POS_ONLY_ARGUMENTS,
        Feature.UNPACKING_ON_FLOW,
        Feature.ANN_ASSIGN_EXTENDED_RHS,
        Feature.PARENTHESIZED_CONTEXT_MANAGERS,
        Feature.PATTERN_MATCHING,
        Feature.EXCEPT_STAR,
        Feature.VARIADIC_GENERICS,
        Feature.TYPE_PARAMS,
        Feature.TYPE_PARAM_DEFAULTS,
    },
    TargetVersion.PY314: {
        Feature.F_STRINGS,
        Feature.DEBUG_F_STRINGS,
        Feature.NUMERIC_UNDERSCORES,
        Feature.TRAILING_COMMA_IN_CALL,
        Feature.TRAILING_COMMA_IN_DEF,
        Feature.ASYNC_KEYWORDS,
        Feature.FUTURE_ANNOTATIONS,
        Feature.ASSIGNMENT_EXPRESSIONS,
        Feature.RELAXED_DECORATORS,
        Feature.POS_ONLY_ARGUMENTS,
        Feature.UNPACKING_ON_FLOW,
        Feature.ANN_ASSIGN_EXTENDED_RHS,
        Feature.PARENTHESIZED_CONTEXT_MANAGERS,
        Feature.PATTERN_MATCHING,
        Feature.EXCEPT_STAR,
        Feature.VARIADIC_GENERICS,
        Feature.TYPE_PARAMS,
        Feature.TYPE_PARAM_DEFAULTS,
        Feature.UNPARENTHESIZED_EXCEPT_TYPES,
        Feature.T_STRINGS,
    },
}
+
+
def supports_feature(target_versions: set[TargetVersion], feature: Feature) -> bool:
    """Return True iff every selected target version supports `feature`.

    Raises ValueError on an empty selection: "supported by nothing" would
    otherwise be vacuously True.
    """
    if not target_versions:
        raise ValueError("target_versions must not be empty")

    for version in target_versions:
        if feature not in VERSION_TO_FEATURES[version]:
            return False
    return True
+
+
class Preview(Enum):
    """Individual preview style features.

    Membership is queried via ``Preview.<name> in mode`` — see
    ``Mode.__contains__`` for how `preview`/`unstable`/`enabled_features`
    interact.
    """

    # NOTE: string_processing requires wrap_long_dict_values_in_parens
    # for https://github.com/psf/black/issues/3117 to be fixed.
    string_processing = auto()
    hug_parens_with_braces_and_square_brackets = auto()
    wrap_long_dict_values_in_parens = auto()
    multiline_string_handling = auto()
    always_one_newline_after_import = auto()
    fix_fmt_skip_in_one_liners = auto()
    standardize_type_comments = auto()
    wrap_comprehension_in = auto()
    # Remove parentheses around multiple exception types in except and
    # except* without as. See PEP 758 for details.
    remove_parens_around_except_types = auto()
    normalize_cr_newlines = auto()
    fix_module_docstring_detection = auto()
    fix_type_expansion_split = auto()
    remove_parens_from_assignment_lhs = auto()
+
+
# Preview features with known problems: excluded in preview mode unless
# Mode.unstable is set or the feature is in Mode.enabled_features
# (see Mode.__contains__).
UNSTABLE_FEATURES: set[Preview] = {
    # Many issues, see summary in https://github.com/psf/black/issues/4042
    Preview.string_processing,
    # See issue #4036 (crash), #4098, #4099 (proposed tweaks)
    Preview.hug_parens_with_braces_and_square_brackets,
}
+
+
class Deprecated(UserWarning):
    """Visible deprecation warning.

    Derives from UserWarning (shown by default) rather than
    DeprecationWarning (hidden by the default warning filters).
    """
+
+
# Cap on the length of each component of a cache key; longer parts are
# replaced with a truncated SHA-256 digest (see Mode.get_cache_key).
_MAX_CACHE_KEY_PART_LENGTH: Final = 32
+
+
@dataclass
class Mode:
    """Black configuration options, as chosen by the user.

    Hashable (see __hash__) and convertible to a stable string key via
    get_cache_key(), so formatting results can be cached per configuration.
    """

    # Python versions the output must remain parseable by; empty = autodetect.
    target_versions: set[TargetVersion] = field(default_factory=set)
    line_length: int = DEFAULT_LINE_LENGTH
    # False disables string quote/prefix normalization.
    string_normalization: bool = True
    is_pyi: bool = False
    is_ipynb: bool = False
    skip_source_first_line: bool = False
    magic_trailing_comma: bool = True
    # Extra cell magics to treat as valid cells when formatting notebooks.
    python_cell_magics: set[str] = field(default_factory=set)
    preview: bool = False
    unstable: bool = False
    # Preview features force-enabled regardless of the `preview` flag.
    enabled_features: set[Preview] = field(default_factory=set)

    def __contains__(self, feature: Preview) -> bool:
        """
        Provide `Preview.FEATURE in Mode` syntax that mirrors the ``preview`` flag.

        In unstable mode, all features are enabled. In preview mode, all features
        except those in UNSTABLE_FEATURES are enabled. Any features in
        `self.enabled_features` are also enabled.
        """
        if self.unstable:
            return True
        if feature in self.enabled_features:
            return True
        return self.preview and feature not in UNSTABLE_FEATURES

    def get_cache_key(self) -> str:
        """Return a dot-separated string uniquely identifying this Mode.

        Variable-length parts (versions, features, cell magics) are sorted
        for determinism and replaced by a truncated SHA-256 digest when they
        exceed _MAX_CACHE_KEY_PART_LENGTH, keeping the key short.
        """
        if self.target_versions:
            version_str = ",".join(
                str(version.value)
                for version in sorted(self.target_versions, key=attrgetter("value"))
            )
        else:
            version_str = "-"
        if len(version_str) > _MAX_CACHE_KEY_PART_LENGTH:
            version_str = sha256(version_str.encode()).hexdigest()[
                :_MAX_CACHE_KEY_PART_LENGTH
            ]
        features_and_magics = (
            ",".join(sorted(f.name for f in self.enabled_features))
            + "@"
            + ",".join(sorted(self.python_cell_magics))
        )
        if len(features_and_magics) > _MAX_CACHE_KEY_PART_LENGTH:
            features_and_magics = sha256(features_and_magics.encode()).hexdigest()[
                :_MAX_CACHE_KEY_PART_LENGTH
            ]
        parts = [
            version_str,
            str(self.line_length),
            str(int(self.string_normalization)),
            str(int(self.is_pyi)),
            str(int(self.is_ipynb)),
            str(int(self.skip_source_first_line)),
            str(int(self.magic_trailing_comma)),
            str(int(self.preview)),
            str(int(self.unstable)),
            features_and_magics,
        ]
        return ".".join(parts)

    def __hash__(self) -> int:
        """Hash on frozen snapshots of the mutable set fields so a Mode can
        be used as a dict key even though the dataclass itself is mutable."""
        return hash((
            frozenset(self.target_versions),
            self.line_length,
            self.string_normalization,
            self.is_pyi,
            self.is_ipynb,
            self.skip_source_first_line,
            self.magic_trailing_comma,
            frozenset(self.python_cell_magics),
            self.preview,
            self.unstable,
            frozenset(self.enabled_features),
        ))
diff --git a/py311/lib/python3.11/site-packages/black/nodes.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/nodes.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..d4bcc979d826f206229a6a9d643dad782928723d
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/nodes.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/nodes.py b/py311/lib/python3.11/site-packages/black/nodes.py
new file mode 100644
index 0000000000000000000000000000000000000000..96bc20f20b3674e6679e8a034d9160357bd18c83
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/nodes.py
@@ -0,0 +1,1093 @@
+"""
+blib2to3 Node/Leaf transformation-related utility functions.
+"""
+
+from collections.abc import Iterator
+from typing import Final, Generic, Literal, TypeGuard, TypeVar, Union
+
+from mypy_extensions import mypyc_attr
+
+from black.cache import CACHE_DIR
+from black.mode import Mode, Preview
+from black.strings import get_string_prefix, has_triple_quotes
+from blib2to3 import pygram
+from blib2to3.pgen2 import token
+from blib2to3.pytree import NL, Leaf, Node, type_repr
+
# Initialize blib2to3's grammars (cached under CACHE_DIR — presumably pickled
# grammar tables; confirm in blib2to3.pygram) before python_symbols is usable.
pygram.initialize(CACHE_DIR)
syms: Final = pygram.python_symbols
+
+
# types
T = TypeVar("T")  # yield type of Visitor.visit()
LN = Union[Leaf, Node]  # any element of the parse tree
LeafID = int  # identity of a Leaf — presumably id(leaf); confirm at use sites
NodeType = int  # numeric token or grammar-symbol type
+
+
# Token/symbol classification sets used throughout the formatter.
WHITESPACE: Final = {token.DEDENT, token.INDENT, token.NEWLINE}
STATEMENT: Final = {
    syms.if_stmt,
    syms.while_stmt,
    syms.for_stmt,
    syms.try_stmt,
    syms.except_clause,
    syms.with_stmt,
    syms.funcdef,
    syms.classdef,
    syms.match_stmt,
    syms.case_block,
}
# A token type Black invents for comments it hoists onto their own line;
# registered in tok_name so debug output can print it.
STANDALONE_COMMENT: Final = 153
token.tok_name[STANDALONE_COMMENT] = "STANDALONE_COMMENT"
LOGIC_OPERATORS: Final = {"and", "or"}
COMPARATORS: Final = {
    token.LESS,
    token.GREATER,
    token.EQEQUAL,
    token.NOTEQUAL,
    token.LESSEQUAL,
    token.GREATEREQUAL,
}
MATH_OPERATORS: Final = {
    token.VBAR,
    token.CIRCUMFLEX,
    token.AMPER,
    token.LEFTSHIFT,
    token.RIGHTSHIFT,
    token.PLUS,
    token.MINUS,
    token.STAR,
    token.SLASH,
    token.DOUBLESLASH,
    token.PERCENT,
    token.AT,
    token.TILDE,
    token.DOUBLESTAR,
}
STARS: Final = {token.STAR, token.DOUBLESTAR}
VARARGS_SPECIALS: Final = STARS | {token.SLASH}
VARARGS_PARENTS: Final = {
    syms.arglist,
    syms.argument,  # double star in arglist
    syms.trailer,  # single argument to call
    syms.typedargslist,
    syms.varargslist,  # lambdas
}
UNPACKING_PARENTS: Final = {
    syms.atom,  # single element of a list or set literal
    syms.dictsetmaker,
    syms.listmaker,
    syms.testlist_gexp,
    syms.testlist_star_expr,
    syms.subject_expr,
    syms.pattern,
}
TEST_DESCENDANTS: Final = {
    syms.test,
    syms.lambdef,
    syms.or_test,
    syms.and_test,
    syms.not_test,
    syms.comparison,
    syms.star_expr,
    syms.expr,
    syms.xor_expr,
    syms.and_expr,
    syms.shift_expr,
    syms.arith_expr,
    syms.trailer,
    syms.term,
    syms.power,
    syms.namedexpr_test,
}
TYPED_NAMES: Final = {syms.tname, syms.tname_star}
# Note ":" is included: annotated assignment targets are treated alike.
ASSIGNMENTS: Final = {
    "=",
    "+=",
    "-=",
    "*=",
    "@=",
    "/=",
    "%=",
    "&=",
    "|=",
    "^=",
    "<<=",
    ">>=",
    "**=",
    "//=",
    ":",
}

IMPLICIT_TUPLE: Final = {syms.testlist, syms.testlist_star_expr, syms.exprlist}
# Maps each opening bracket token to its matching closing token.
BRACKET: Final = {
    token.LPAR: token.RPAR,
    token.LSQB: token.RSQB,
    token.LBRACE: token.RBRACE,
}
OPENING_BRACKETS: Final = set(BRACKET.keys())
CLOSING_BRACKETS: Final = set(BRACKET.values())
BRACKETS: Final = OPENING_BRACKETS | CLOSING_BRACKETS
ALWAYS_NO_SPACE: Final = CLOSING_BRACKETS | {
    token.COMMA,
    STANDALONE_COMMENT,
    token.FSTRING_MIDDLE,
    token.FSTRING_END,
    token.TSTRING_MIDDLE,
    token.TSTRING_END,
    token.BANG,
}

# Numeric token id kept as a literal — presumably blib2to3's "->" token;
# TODO confirm against blib2to3.pgen2.token.
RARROW = 55
+
+
@mypyc_attr(allow_interpreted_subclasses=True)
class Visitor(Generic[T]):
    """Basic lib2to3 visitor that yields things of type `T` on `visit()`."""

    def visit(self, node: LN) -> Iterator[T]:
        """Main method to visit `node` and its children.

        It tries to find a `visit_*()` method for the given `node.type`, like
        `visit_simple_stmt` for Node objects or `visit_INDENT` for Leaf objects.
        If no dedicated `visit_*()` method is found, chooses `visit_default()`
        instead.

        Then yields objects of type `T` from the selected visitor.
        """
        # Ids below 256 are token types (named in tok_name); higher ids are
        # grammar symbols whose name comes from type_repr().
        if node.type < 256:
            name = token.tok_name[node.type]
        else:
            name = str(type_repr(node.type))
        # We explicitly branch on whether a visitor exists (instead of
        # using self.visit_default as the default arg to getattr) in order
        # to save needing to create a bound method object and so mypyc can
        # generate a native call to visit_default.
        visitf = getattr(self, f"visit_{name}", None)
        if visitf:
            yield from visitf(node)
        else:
            yield from self.visit_default(node)

    def visit_default(self, node: LN) -> Iterator[T]:
        """Default `visit_*()` implementation. Recurses to children of `node`."""
        if isinstance(node, Node):
            # Leaves have no children, so only Nodes are recursed into.
            for child in node.children:
                yield from self.visit(child)
+
+
def whitespace(leaf: Leaf, *, complex_subscript: bool, mode: Mode) -> str:
    """Return whitespace prefix if needed for the given `leaf`.

    `complex_subscript` signals whether the given leaf is part of a subscription
    which has non-trivial arguments, like arithmetic expressions or function calls.

    Returns one of "" (NO), " " (SPACE) or "  " (DOUBLESPACE, before comments).
    NOTE(review): the `mode` parameter appears unused in this body.
    """
    NO: Final[str] = ""
    SPACE: Final[str] = " "
    DOUBLESPACE: Final[str] = "  "
    t = leaf.type
    p = leaf.parent
    v = leaf.value
    if t in ALWAYS_NO_SPACE:
        return NO

    if t == token.COMMENT:
        return DOUBLESPACE

    assert p is not None, f"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}"
    if t == token.COLON and p.type not in {
        syms.subscript,
        syms.subscriptlist,
        syms.sliceop,
    }:
        return NO

    if t == token.LBRACE and p.type in (
        syms.fstring_replacement_field,
        syms.tstring_replacement_field,
    ):
        return NO

    # No previous sibling in the tree: decide based on the leaf that
    # textually precedes the parent instead.
    prev = leaf.prev_sibling
    if not prev:
        prevp = preceding_leaf(p)
        if not prevp or prevp.type in OPENING_BRACKETS:
            return NO

        if t == token.COLON:
            if prevp.type == token.COLON:
                return NO

            elif prevp.type != token.COMMA and not complex_subscript:
                return NO

            return SPACE

        if prevp.type == token.EQUAL:
            if prevp.parent:
                if prevp.parent.type in {
                    syms.arglist,
                    syms.argument,
                    syms.parameters,
                    syms.varargslist,
                }:
                    return NO

                elif prevp.parent.type == syms.typedargslist:
                    # A bit hacky: if the equal sign has whitespace, it means we
                    # previously found it's a typed argument. So, we're using
                    # that, too.
                    return prevp.prefix

        elif (
            prevp.type == token.STAR
            and parent_type(prevp) == syms.star_expr
            and parent_type(prevp.parent) in (syms.subscriptlist, syms.tname_star)
        ):
            # No space between typevar tuples or unpacking them.
            return NO

        elif prevp.type in VARARGS_SPECIALS:
            if is_vararg(prevp, within=VARARGS_PARENTS | UNPACKING_PARENTS):
                return NO

        elif prevp.type == token.COLON:
            if prevp.parent and prevp.parent.type in {syms.subscript, syms.sliceop}:
                return SPACE if complex_subscript else NO

        elif (
            prevp.parent
            and prevp.parent.type == syms.factor
            and prevp.type in MATH_OPERATORS
        ):
            return NO

        elif prevp.type == token.AT and p.parent and p.parent.type == syms.decorator:
            # no space in decorators
            return NO

    elif prev.type in OPENING_BRACKETS:
        return NO

    elif prev.type == token.BANG:
        return NO

    # Rules keyed on the parent node's type.
    if p.type in {syms.parameters, syms.arglist}:
        # untyped function signatures or calls
        if not prev or prev.type != token.COMMA:
            return NO

    elif p.type == syms.varargslist:
        # lambdas
        if prev and prev.type != token.COMMA:
            return NO

    elif p.type == syms.typedargslist:
        # typed function signatures
        if not prev:
            return NO

        if t == token.EQUAL:
            if prev.type not in TYPED_NAMES:
                return NO

        elif prev.type == token.EQUAL:
            # A bit hacky: if the equal sign has whitespace, it means we
            # previously found it's a typed argument. So, we're using that, too.
            return prev.prefix

        elif prev.type != token.COMMA:
            return NO

    elif p.type in TYPED_NAMES:
        # type names
        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type != token.COMMA:
                return NO

    elif p.type == syms.trailer:
        # attributes and calls
        if t == token.LPAR or t == token.RPAR:
            return NO

        if not prev:
            if t == token.DOT or t == token.LSQB:
                return NO

        elif prev.type != token.COMMA:
            return NO

    elif p.type == syms.argument:
        # single argument
        if t == token.EQUAL:
            return NO

        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type == token.LPAR:
                return NO

        elif prev.type in {token.EQUAL} | VARARGS_SPECIALS:
            return NO

    elif p.type == syms.decorator:
        # decorators
        return NO

    elif p.type == syms.dotted_name:
        if prev:
            return NO

        prevp = preceding_leaf(p)
        if not prevp or prevp.type == token.AT or prevp.type == token.DOT:
            return NO

    elif p.type == syms.classdef:
        if t == token.LPAR:
            return NO

        if prev and prev.type == token.LPAR:
            return NO

    elif p.type in {syms.subscript, syms.sliceop}:
        # indexing
        if not prev:
            assert p.parent is not None, "subscripts are always parented"
            if p.parent.type == syms.subscriptlist:
                return SPACE

            return NO

        elif t == token.COLONEQUAL or prev.type == token.COLONEQUAL:
            return SPACE

        elif not complex_subscript:
            return NO

    elif p.type == syms.atom:
        if prev and t == token.DOT:
            # dots, but not the first one.
            return NO

    elif p.type == syms.dictsetmaker:
        # dict unpacking
        if prev and prev.type == token.DOUBLESTAR:
            return NO

    elif p.type in {syms.factor, syms.star_expr}:
        # unary ops
        if not prev:
            prevp = preceding_leaf(p)
            if not prevp or prevp.type in OPENING_BRACKETS:
                return NO

            prevp_parent = prevp.parent
            assert prevp_parent is not None
            if prevp.type == token.COLON and prevp_parent.type in {
                syms.subscript,
                syms.sliceop,
            }:
                return NO

            elif prevp.type == token.EQUAL and prevp_parent.type == syms.argument:
                return NO

        elif t in {token.NAME, token.NUMBER, token.STRING}:
            return NO

    elif p.type == syms.import_from:
        if t == token.DOT:
            if prev and prev.type == token.DOT:
                return NO

        elif t == token.NAME:
            if v == "import":
                return SPACE

            if prev and prev.type == token.DOT:
                return NO

    elif p.type == syms.sliceop:
        return NO

    elif p.type == syms.except_clause:
        if t == token.STAR:
            return NO

    # Default: a single space before the leaf.
    return SPACE
+
+
def make_simple_prefix(nl_count: int, form_feed: bool, empty_line: str = "\n") -> str:
    """Generate a normalized prefix string.

    The prefix is `nl_count` copies of `empty_line`; with `form_feed` set,
    a form-feed character is placed just before the final newline.
    """
    if not form_feed:
        return empty_line * nl_count
    return empty_line * (nl_count - 1) + "\f" + empty_line
+
+
def preceding_leaf(node: LN | None) -> Leaf | None:
    """Return the first leaf that precedes `node`, if any.

    Walks up through parents until some ancestor has a previous sibling,
    then returns that sibling's last leaf.
    """
    current = node
    while current:
        sibling = current.prev_sibling
        if sibling:
            if isinstance(sibling, Leaf):
                return sibling

            # A Node: its last leaf is the textually preceding one.
            sibling_leaves = list(sibling.leaves())
            return sibling_leaves[-1] if sibling_leaves else None

        current = current.parent
    return None
+
+
def prev_siblings_are(node: LN | None, tokens: list[NodeType | None]) -> bool:
    """Return if the `node` and its previous siblings match types against the provided
    list of tokens; the provided `node`has its type matched against the last element in
    the list. `None` can be used as the first element to declare that the start of the
    list is anchored at the start of its parent's children."""
    # Walk the expected types right-to-left while stepping through previous
    # siblings; iterative form of the recursive definition.
    for expected in reversed(tokens):
        if expected is None:
            return node is None
        if not node or node.type != expected:
            return False
        node = node.prev_sibling
    return True
+
+
def parent_type(node: LN | None) -> NodeType | None:
    """
    Returns:
        @node.parent.type, if @node is not None and has a parent.
        OR
        None, otherwise.
    """
    if node is not None and node.parent is not None:
        return node.parent.type
    return None
+
+
def child_towards(ancestor: Node, descendant: LN) -> LN | None:
    """Return the child of `ancestor` that contains `descendant`."""
    current: LN | None = descendant
    # Climb until the current node's parent is the ancestor (or we run out).
    while current and current.parent != ancestor:
        current = current.parent
    return current
+
+
def replace_child(old_child: LN, new_child: LN) -> None:
    """
    Side Effects:
        * If @old_child.parent is set, replace @old_child with @new_child in
          @old_child's underlying Node structure.
        OR
        * Otherwise, this function does nothing.
    """
    enclosing = old_child.parent
    if not enclosing:
        return

    # remove() reports the slot the child occupied, if it was attached.
    slot = old_child.remove()
    if slot is not None:
        enclosing.insert_child(slot, new_child)
+
+
def container_of(leaf: Leaf) -> LN:
    """Return `leaf` or one of its ancestors that is the topmost container of it.

    By "container" we mean a node where `leaf` is the very first child.
    """
    same_prefix = leaf.prefix
    container: LN = leaf
    while container:
        parent = container.parent
        # Stop climbing when the parent is missing, when `leaf` is no longer
        # the parent's first child (prefix mismatch), at the file root, or
        # when the parent directly follows a bracket.
        if (
            parent is None
            or parent.children[0].prefix != same_prefix
            or parent.type == syms.file_input
            or (
                parent.prev_sibling is not None
                and parent.prev_sibling.type in BRACKETS
            )
        ):
            break

        container = parent
    return container
+
+
def first_leaf_of(node: LN) -> Leaf | None:
    """Returns the first leaf of the node tree."""
    # Iterative descent along each node's first child.
    current: LN = node
    while not isinstance(current, Leaf):
        if not current.children:
            return None
        current = current.children[0]
    return current
+
+
def is_arith_like(node: LN) -> bool:
    """Whether node is an arithmetic or a binary arithmetic expression"""
    arith_types = (
        syms.arith_expr,
        syms.shift_expr,
        syms.xor_expr,
        syms.and_expr,
    )
    return node.type in arith_types
+
+
def is_docstring(node: NL) -> bool:
    """Return True if `node` sits in a docstring position.

    Checks module docstrings, the first statement of an indented suite, and
    multiline docstrings that start on the `def` line itself.
    """
    if isinstance(node, Leaf):
        if node.type != token.STRING:
            return False

        # Bytes and f-strings can never be docstrings.
        prefix = get_string_prefix(node.value)
        if set(prefix).intersection("bBfF"):
            return False

    # Module docstring: first simple_stmt directly under file_input.
    if (
        node.parent
        and node.parent.type == syms.simple_stmt
        and not node.parent.prev_sibling
        and node.parent.parent
        and node.parent.parent.type == syms.file_input
    ):
        return True

    # First statement of an indented suite (NEWLINE INDENT simple_stmt ...).
    if prev_siblings_are(
        node.parent, [None, token.NEWLINE, token.INDENT, syms.simple_stmt]
    ):
        return True

    # Multiline docstring on the same line as the `def`.
    if prev_siblings_are(node.parent, [syms.parameters, token.COLON, syms.simple_stmt]):
        # `syms.parameters` is only used in funcdefs and async_funcdefs in the Python
        # grammar. We're safe to return True without further checks.
        return True

    return False
+
+
def is_empty_tuple(node: LN) -> bool:
    """Return True if `node` holds an empty tuple."""
    if node.type != syms.atom or len(node.children) != 2:
        return False
    opener, closer = node.children
    return opener.type == token.LPAR and closer.type == token.RPAR
+
+
def is_one_tuple(node: LN) -> bool:
    """Return True if `node` holds a tuple with one element, with or without parens."""
    if node.type == syms.atom:
        # Parenthesized form: look inside the parens for `x,`.
        inner = unwrap_singleton_parenthesis(node)
        if inner is None or inner.type != syms.testlist_gexp:
            return False
        return len(inner.children) == 2 and inner.children[1].type == token.COMMA

    # Bare form: an implicit tuple whose second child is the trailing comma.
    if node.type not in IMPLICIT_TUPLE:
        return False
    return len(node.children) == 2 and node.children[1].type == token.COMMA
+
+
def is_tuple(node: LN) -> bool:
    """Return True if `node` holds a tuple."""
    if node.type != syms.atom:
        return False
    inner = unwrap_singleton_parenthesis(node)
    return inner is not None and inner.type == syms.testlist_gexp
+
+
def is_tuple_containing_walrus(node: LN) -> bool:
    """Return True if `node` holds a tuple that contains a walrus operator."""
    if node.type != syms.atom:
        return False
    inner = unwrap_singleton_parenthesis(node)
    if inner is None or inner.type != syms.testlist_gexp:
        return False

    for element in inner.children:
        if element.type == syms.namedexpr_test:
            return True
    return False
+
+
def is_tuple_containing_star(node: LN) -> bool:
    """Return True if `node` holds a tuple that contains a star operator."""
    if node.type != syms.atom:
        return False
    inner = unwrap_singleton_parenthesis(node)
    if inner is None or inner.type != syms.testlist_gexp:
        return False

    for element in inner.children:
        if element.type == syms.star_expr:
            return True
    return False
+
+
def is_generator(node: LN) -> bool:
    """Return True if `node` holds a generator."""
    if node.type != syms.atom:
        return False
    inner = unwrap_singleton_parenthesis(node)
    if inner is None or inner.type != syms.testlist_gexp:
        return False

    # A comprehension clause among the children marks a generator expression.
    return any(element.type == syms.old_comp_for for element in inner.children)
+
+
def is_one_sequence_between(
    opening: Leaf,
    closing: Leaf,
    leaves: list[Leaf],
    brackets: tuple[int, int] = (token.LPAR, token.RPAR),
) -> bool:
    """Return True if content between `opening` and `closing` is a one-sequence.

    "One-sequence" means at most one top-level comma between the brackets,
    i.e. a single element (possibly with a trailing comma).
    """
    if (opening.type, closing.type) != brackets:
        return False

    depth = closing.bracket_depth + 1
    # Locate `opening` in `leaves`; for/else raises if it is absent.
    for _opening_index, leaf in enumerate(leaves):
        if leaf is opening:
            break

    else:
        raise LookupError("Opening paren not found in `leaves`")

    commas = 0
    # `_opening_index` leaks from the loop above; advance past `opening`.
    _opening_index += 1
    for leaf in leaves[_opening_index:]:
        if leaf is closing:
            break

        bracket_depth = leaf.bracket_depth
        if bracket_depth == depth and leaf.type == token.COMMA:
            # Only commas at this bracket pair's own depth count.
            commas += 1
            if leaf.parent and leaf.parent.type in {
                syms.arglist,
                syms.typedargslist,
            }:
                # An argument-separating comma disqualifies immediately;
                # the extra increment forces `commas < 2` to fail.
                commas += 1
                break

    return commas < 2
+
+
def is_walrus_assignment(node: LN) -> bool:
    """Return True iff `node` is of the shape ( test := test )"""
    inner = unwrap_singleton_parenthesis(node)
    if inner is None:
        return False
    return inner.type == syms.namedexpr_test
+
+
def is_simple_decorator_trailer(node: LN, last: bool = False) -> bool:
    """Return True iff `node` is a trailer valid in a simple decorator"""
    if node.type != syms.trailer:
        return False

    children = node.children
    # `.name` attribute access is always allowed.
    if (
        len(children) == 2
        and children[0].type == token.DOT
        and children[1].type == token.NAME
    ):
        return True

    if not last:
        return False

    # The last trailer may be an argument-less parentheses pair...
    if (
        len(children) == 2
        and children[0].type == token.LPAR
        and children[1].type == token.RPAR
    ):
        return True

    # ...or a parenthesized argument list.
    return (
        len(children) == 3
        and children[0].type == token.LPAR
        and children[2].type == token.RPAR
    )
+
+
def is_simple_decorator_expression(node: LN) -> bool:
    """Return True iff `node` could be a 'dotted name' decorator

    This function takes the node of the 'namedexpr_test' of the new decorator
    grammar and test if it would be valid under the old decorator grammar.

    The old grammar was: decorator: @ dotted_name [arguments] NEWLINE
    The new grammar is : decorator: @ namedexpr_test NEWLINE
    """
    if node.type == token.NAME:
        return True
    if node.type != syms.power or not node.children:
        return False

    children = node.children
    if children[0].type != token.NAME:
        return False
    # Every intermediate trailer must be a plain attribute access...
    if not all(is_simple_decorator_trailer(child) for child in children[1:-1]):
        return False
    # ...and the final trailer may additionally be a call.
    return len(children) < 2 or is_simple_decorator_trailer(children[-1], last=True)
+
+
def is_yield(node: LN) -> bool:
    """Return True if `node` holds a `yield` or `yield from` expression."""
    if node.type == syms.yield_expr:
        return True

    if is_name_token(node) and node.value == "yield":
        return True

    # Otherwise only a parenthesized yield qualifies: atom of exactly
    # `( <expr> )` whose inner expression is itself a yield.
    if node.type != syms.atom or len(node.children) != 3:
        return False

    lpar, inner, rpar = node.children
    if lpar.type != token.LPAR or rpar.type != token.RPAR:
        return False
    return is_yield(inner)
+
+
def is_vararg(leaf: Leaf, within: set[NodeType]) -> bool:
    """Return True if `leaf` is a star or double star in a vararg or kwarg.

    If `within` includes VARARGS_PARENTS, this applies to function signatures.
    If `within` includes UNPACKING_PARENTS, it applies to right hand-side
    extended iterable unpacking (PEP 3132) and additional unpacking
    generalizations (PEP 448).
    """
    if leaf.type not in VARARGS_SPECIALS:
        return False
    parent = leaf.parent
    if not parent:
        return False

    if parent.type == syms.star_expr:
        # Star expressions are also used as assignment targets in extended
        # iterable unpacking (PEP 3132). See what its parent is instead.
        parent = parent.parent
        if not parent:
            return False

    return parent.type in within
+
+
def is_fstring(node: Node) -> bool:
    """Return True if the node is an f-string"""
    if node.type == syms.fstring:
        return True
    return False
+
+
def fstring_tstring_to_string(node: Node) -> Leaf:
    """Converts an fstring or tstring node back to a string node."""
    prefix = node.prefix
    # Render the node and drop its whitespace prefix to get the raw literal.
    raw_text = str(node)[len(prefix) :]
    string_leaf = Leaf(token.STRING, raw_text, prefix=prefix)
    string_leaf.lineno = node.get_lineno() or 0
    return string_leaf
+
+
def is_multiline_string(node: LN) -> bool:
    """Return True if `leaf` is a multiline string that actually spans many lines."""
    if isinstance(node, Leaf):
        candidate = node
    elif isinstance(node, Node) and is_fstring(node):
        candidate = fstring_tstring_to_string(node)
    else:
        return False

    return has_triple_quotes(candidate.value) and "\n" in candidate.value
+
+
def is_parent_function_or_class(node: Node) -> bool:
    """Return True if `node`'s parent is a funcdef or classdef."""
    assert node.type in {syms.suite, syms.simple_stmt}
    assert node.parent is not None
    # Note this works for suites / simple_stmts in async def as well
    enclosing = node.parent.type
    return enclosing in {syms.funcdef, syms.classdef}
+
+
def is_function_or_class(node: Node) -> bool:
    """Return True if `node` is a (possibly async) function or class definition."""
    definition_types = (syms.funcdef, syms.classdef, syms.async_funcdef)
    return node.type in definition_types
+
+
def is_stub_suite(node: Node) -> bool:
    """Return True if `node` is a suite with a stub body."""
    if node.parent is not None and not is_parent_function_or_class(node):
        return False

    # If there is a comment, we want to keep it.
    if node.prefix.strip():
        return False

    children = node.children
    # Expected shape: NEWLINE INDENT <simple_stmt> DEDENT.
    if len(children) != 4:
        return False
    if children[0].type != token.NEWLINE or children[1].type != token.INDENT:
        return False
    if children[3].type != token.DEDENT:
        return False
    # A comment attached to the DEDENT must be preserved too.
    if children[3].prefix.strip():
        return False

    return is_stub_body(children[2])
+
+
def is_stub_body(node: LN) -> bool:
    """Return True if `node` is a simple statement containing an ellipsis."""
    if not isinstance(node, Node) or node.type != syms.simple_stmt:
        return False
    if len(node.children) != 2:
        return False

    child = node.children[0]
    if child.prefix.strip():
        # A comment before the body disqualifies it.
        return False
    if child.type != syms.atom or len(child.children) != 3:
        return False

    # `...` parses as three DOT leaves inside an atom.
    dot = Leaf(token.DOT, ".")
    return all(leaf == dot for leaf in child.children)
+
+
def is_atom_with_invisible_parens(node: LN) -> bool:
    """Given a `LN`, determines whether it's an atom `node` with invisible
    parens. Useful in dedupe-ing and normalizing parens.
    """
    if isinstance(node, Leaf) or node.type != syms.atom:
        return False

    first = node.children[0]
    last = node.children[-1]
    if not isinstance(first, Leaf) or not isinstance(last, Leaf):
        return False
    # Invisible parens are LPAR/RPAR leaves with empty string values.
    return (
        first.type == token.LPAR
        and first.value == ""
        and last.type == token.RPAR
        and last.value == ""
    )
+
+
def is_empty_par(leaf: Leaf) -> bool:
    """Whether `leaf` is an invisible opening or closing paren."""
    if is_empty_lpar(leaf):
        return True
    return is_empty_rpar(leaf)
+
+
def is_empty_lpar(leaf: Leaf) -> bool:
    """Whether `leaf` is an invisible (empty-valued) opening paren."""
    return leaf.type == token.LPAR and not leaf.value
+
+
def is_empty_rpar(leaf: Leaf) -> bool:
    """Whether `leaf` is an invisible (empty-valued) closing paren."""
    return leaf.type == token.RPAR and not leaf.value
+
+
def is_import(leaf: Leaf) -> bool:
    """Return True if the given leaf starts an import statement."""
    parent = leaf.parent
    if leaf.type != token.NAME or parent is None:
        return False
    # `import x` lives under import_name, `from x import y` under import_from.
    if leaf.value == "import":
        return parent.type == syms.import_name
    if leaf.value == "from":
        return parent.type == syms.import_from
    return False
+
+
def is_with_or_async_with_stmt(leaf: Leaf) -> bool:
    """Return True if the given leaf starts a with or async with statement."""
    if leaf.type == token.NAME and leaf.value == "with":
        return bool(leaf.parent and leaf.parent.type == syms.with_stmt)
    if leaf.type == token.ASYNC:
        # For `async with`, the ASYNC leaf's next sibling is the with_stmt node.
        sibling = leaf.next_sibling
        return bool(sibling and sibling.type == syms.with_stmt)
    return False
+
+
def is_async_stmt_or_funcdef(leaf: Leaf) -> bool:
    """Return True if the given leaf starts an async def/for/with statement.

    Note that `async def` can be either an `async_stmt` or `async_funcdef`,
    the latter is used when it has decorators.
    """
    if leaf.type != token.ASYNC or leaf.parent is None:
        return False
    return leaf.parent.type in (syms.async_stmt, syms.async_funcdef)
+
+
def is_type_comment(leaf: Leaf, mode: Mode) -> bool:
    """Return True if the given leaf is a type comment. This function should only
    be used for general type comments (excluding ignore annotations, which should
    use `is_type_ignore_comment`). Note that general type comments are no longer
    used in modern version of Python, this function may be deprecated in the future."""
    if leaf.type not in {token.COMMENT, STANDALONE_COMMENT}:
        return False
    return is_type_comment_string(leaf.value, mode)
+
+
def is_type_comment_string(value: str, mode: Mode) -> bool:
    """Whether the comment text `value` is a `# type:` comment."""
    if Preview.standardize_type_comments in mode:
        # Allow arbitrary whitespace between the `#` and `type:`.
        return value.startswith("#") and value[1:].lstrip().startswith("type:")
    return value.startswith("# type:")
+
+
def is_type_ignore_comment(leaf: Leaf, mode: Mode) -> bool:
    """Return True if the given leaf is a type comment with ignore annotation."""
    if leaf.type not in {token.COMMENT, STANDALONE_COMMENT}:
        return False
    return is_type_ignore_comment_string(leaf.value, mode)
+
+
def is_type_ignore_comment_string(value: str, mode: Mode) -> bool:
    """Return True if the given string match with type comment with
    ignore annotation."""
    if Preview.standardize_type_comments not in mode:
        return value.startswith("# type: ignore")
    if not is_type_comment_string(value, mode):
        return False
    # Whatever follows the first `:` must lead with `ignore`.
    remainder = value.split(":", 1)[1]
    return remainder.lstrip().startswith("ignore")
+
+
def wrap_in_parentheses(parent: Node, child: LN, *, visible: bool = True) -> None:
    """Wrap `child` in parentheses.

    This replaces `child` with an atom holding the parentheses and the old
    child. That requires moving the prefix.

    If `visible` is False, the leaves will be valueless (and thus invisible).
    """
    # Move the prefix onto the new wrapper so whitespace/comments stay put.
    prefix = child.prefix
    child.prefix = ""
    index = child.remove() or 0
    wrapper = Node(
        syms.atom,
        [
            Leaf(token.LPAR, "(" if visible else ""),
            child,
            Leaf(token.RPAR, ")" if visible else ""),
        ],
    )
    wrapper.prefix = prefix
    parent.insert_child(index, wrapper)
+
+
def unwrap_singleton_parenthesis(node: LN) -> LN | None:
    """Returns `wrapped` if `node` is of the shape ( wrapped ).

    Parenthesis can be optional. Returns None otherwise"""
    if len(node.children) != 3:
        return None

    opening, wrapped, closing = node.children
    if opening.type == token.LPAR and closing.type == token.RPAR:
        return wrapped
    return None
+
+
def ensure_visible(leaf: Leaf) -> None:
    """Make sure parentheses are visible.

    They could be invisible as part of some statements (see
    :func:`normalize_invisible_parens` and :func:`visit_import_from`).
    """
    # Map the paren token type to its visible text; leave other leaves alone.
    visible_value = {token.LPAR: "(", token.RPAR: ")"}.get(leaf.type)
    if visible_value is not None:
        leaf.value = visible_value
+
+
def is_name_token(nl: NL) -> TypeGuard[Leaf]:
    """Type guard: narrow `nl` to `Leaf` when it carries a NAME token type."""
    return nl.type == token.NAME
+
+
def is_lpar_token(nl: NL) -> TypeGuard[Leaf]:
    """Type guard: narrow `nl` to `Leaf` when it carries an LPAR token type."""
    return nl.type == token.LPAR
+
+
def is_rpar_token(nl: NL) -> TypeGuard[Leaf]:
    """Type guard: narrow `nl` to `Leaf` when it carries an RPAR token type."""
    return nl.type == token.RPAR
+
+
def is_number_token(nl: NL) -> TypeGuard[Leaf]:
    """Type guard: narrow `nl` to `Leaf` when it carries a NUMBER token type."""
    return nl.type == token.NUMBER
+
+
def get_annotation_type(leaf: Leaf) -> Literal["return", "param", None]:
    """Returns the type of annotation this leaf is part of, if any."""
    ancestor = leaf.parent
    while ancestor is not None:
        # `-> annotation` puts a RARROW right before the annotation node.
        prev = ancestor.prev_sibling
        if prev is not None and prev.type == token.RARROW:
            return "return"
        # Parameter annotations live inside a `tname` node.
        if ancestor.parent is not None and ancestor.parent.type == syms.tname:
            return "param"
        ancestor = ancestor.parent
    return None
+
+
def is_part_of_annotation(leaf: Leaf) -> bool:
    """Returns whether this leaf is part of a type annotation."""
    # A detached leaf cannot be classified; callers must pass attached leaves.
    assert leaf.parent is not None
    return get_annotation_type(leaf) is not None
+
+
def first_leaf(node: LN) -> Leaf | None:
    """Returns the first leaf of the ancestor node."""
    # Walk the leftmost spine until a leaf (or a childless node) is reached.
    current = node
    while not isinstance(current, Leaf):
        if not current.children:
            return None
        current = current.children[0]
    return current
+
+
def last_leaf(node: LN) -> Leaf | None:
    """Returns the last leaf of the ancestor node."""
    # Walk the rightmost spine until a leaf (or a childless node) is reached.
    current = node
    while not isinstance(current, Leaf):
        if not current.children:
            return None
        current = current.children[-1]
    return current
+
+
def furthest_ancestor_with_last_leaf(leaf: Leaf) -> LN:
    """Returns the furthest ancestor that has this leaf node as the last leaf."""
    node: LN = leaf
    while True:
        parent = node.parent
        # Stop climbing once `node` is no longer its parent's last child.
        if parent is None or not parent.children or parent.children[-1] is not node:
            return node
        node = parent
+
+
def has_sibling_with_type(node: LN, type: int) -> bool:
    """Whether any sibling of `node`, on either side, has the given type."""
    # Scan outward from `node` in both directions.
    for direction in ("prev_sibling", "next_sibling"):
        sibling = getattr(node, direction)
        while sibling is not None:
            if sibling.type == type:
                return True
            sibling = getattr(sibling, direction)
    return False
diff --git a/py311/lib/python3.11/site-packages/black/numerics.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/numerics.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..2b96f8c5b1038c0632544d8671d53ee1475e8823
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/numerics.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/numerics.py b/py311/lib/python3.11/site-packages/black/numerics.py
new file mode 100644
index 0000000000000000000000000000000000000000..3040de06fde3033ba333bca7934e8314c516a148
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/numerics.py
@@ -0,0 +1,61 @@
+"""
+Formatting numeric literals.
+"""
+
+from blib2to3.pytree import Leaf
+
+
def format_hex(text: str) -> str:
    """
    Formats a hexadecimal string like "0x12B3"
    """
    # Keep the "0x" prefix as-is and uppercase only the digits.
    prefix, digits = text[:2], text[2:]
    return prefix + digits.upper()
+
+
def format_scientific_notation(text: str) -> str:
    """Formats a numeric string utilizing scientific notation"""
    mantissa, exponent = text.split("e")
    # Normalize the exponent sign: drop a leading "+", keep a leading "-".
    sign = ""
    if exponent[:1] == "-":
        sign = "-"
        exponent = exponent[1:]
    elif exponent[:1] == "+":
        exponent = exponent[1:]
    return f"{format_float_or_int_string(mantissa)}e{sign}{exponent}"
+
+
def format_complex_number(text: str) -> str:
    """Formats a complex string like `10j`"""
    # Normalize the numeric part; the trailing "j" suffix is kept verbatim.
    number, suffix = text[:-1], text[-1]
    return format_float_or_int_string(number) + suffix
+
+
def format_float_or_int_string(text: str) -> str:
    """Formats a float string like "1.0"."""
    if "." not in text:
        return text

    # Pad a missing integral or fractional part with zero: ".5" -> "0.5".
    integral, fraction = text.split(".")
    return f"{integral or 0}.{fraction or 0}"
+
+
def normalize_numeric_literal(leaf: Leaf) -> None:
    """Normalizes numeric (float, int, and complex) literals.

    All letters used in the representation are normalized to lowercase."""
    text = leaf.value.lower()
    if text[:2] in {"0o", "0b"}:
        # Leave octal and binary literals alone.
        leaf.value = text
        return
    if text[:2] == "0x":
        leaf.value = format_hex(text)
    elif "e" in text:
        leaf.value = format_scientific_notation(text)
    elif text[-1:] == "j":
        leaf.value = format_complex_number(text)
    else:
        leaf.value = format_float_or_int_string(text)
diff --git a/py311/lib/python3.11/site-packages/black/output.py b/py311/lib/python3.11/site-packages/black/output.py
new file mode 100644
index 0000000000000000000000000000000000000000..76c28c344a903183ab6fa4c85c80a818ee71674a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/output.py
@@ -0,0 +1,122 @@
+"""Nice output for Black.
+
+The double calls are for patching purposes in tests.
+"""
+
+import json
+import re
+import tempfile
+from typing import Any
+
+from click import echo, style
+from mypy_extensions import mypyc_attr
+
+
@mypyc_attr(patchable=True)
def _out(message: str | None = None, nl: bool = True, **styles: Any) -> None:
    """Write `message` to stderr, styled bold unless styles say otherwise."""
    if message is not None:
        # Default to bold output; explicitly passed styles take precedence.
        styles.setdefault("bold", True)
        message = style(message, **styles)
    echo(message, nl=nl, err=True)
+
+
@mypyc_attr(patchable=True)
def _err(message: str | None = None, nl: bool = True, **styles: Any) -> None:
    """Write `message` to stderr, red unless styles say otherwise."""
    if message is not None:
        # Default to red output; explicitly passed styles take precedence.
        styles.setdefault("fg", "red")
        message = style(message, **styles)
    echo(message, nl=nl, err=True)
+
+
@mypyc_attr(patchable=True)
def out(message: str | None = None, nl: bool = True, **styles: Any) -> None:
    """Public wrapper around `_out` (the indirection exists for test patching)."""
    _out(message, nl=nl, **styles)
+
+
def err(message: str | None = None, nl: bool = True, **styles: Any) -> None:
    """Public wrapper around `_err` (the indirection exists for test patching)."""
    _err(message, nl=nl, **styles)
+
+
def ipynb_diff(a: str, b: str, a_name: str, b_name: str) -> str:
    """Return a unified diff string between each cell in notebooks `a` and `b`."""
    a_cells = json.loads(a)["cells"]
    b_cells = json.loads(b)["cells"]
    chunks = []
    # Only code cells are compared; each gets its own per-cell header.
    for cell_number, cell in enumerate(a_cells):
        if cell["cell_type"] != "code":
            continue
        chunks.append(
            diff(
                "".join(a_cells[cell_number]["source"]) + "\n",
                "".join(b_cells[cell_number]["source"]) + "\n",
                f"{a_name}:cell_{cell_number}",
                f"{b_name}:cell_{cell_number}",
            )
        )
    return "".join(chunks)
+
+
+_line_pattern = re.compile(r"(.*?(?:\r\n|\n|\r|$))")
+
+
+def _splitlines_no_ff(source: str) -> list[str]:
+ """Split a string into lines ignoring form feed and other chars.
+
+ This mimics how the Python parser splits source code.
+
+ A simplified version of the function with the same name in Lib/ast.py
+ """
+ result = [match[0] for match in _line_pattern.finditer(source)]
+ if result[-1] == "":
+ result.pop(-1)
+ return result
+
+
+def diff(a: str, b: str, a_name: str, b_name: str) -> str:
+ """Return a unified diff string between strings `a` and `b`."""
+ import difflib
+
+ a_lines = _splitlines_no_ff(a)
+ b_lines = _splitlines_no_ff(b)
+ diff_lines = []
+ for line in difflib.unified_diff(
+ a_lines, b_lines, fromfile=a_name, tofile=b_name, n=5
+ ):
+ # Work around https://bugs.python.org/issue2142
+ # See:
+ # https://www.gnu.org/software/diffutils/manual/html_node/Incomplete-Lines.html
+ if line[-1] == "\n":
+ diff_lines.append(line)
+ else:
+ diff_lines.append(line + "\n")
+ diff_lines.append("\\ No newline at end of file\n")
+ return "".join(diff_lines)
+
+
def color_diff(contents: str) -> str:
    """Inject the ANSI color codes to the diff."""
    reset = "\033[0m"

    def colorize(line: str) -> str:
        # Order matters: "+++"/"---" headers must win over plain "+"/"-".
        if line.startswith(("+++", "---")):
            return "\033[1m" + line + reset  # bold
        if line.startswith("@@"):
            return "\033[36m" + line + reset  # cyan
        if line.startswith("+"):
            return "\033[32m" + line + reset  # green
        if line.startswith("-"):
            return "\033[31m" + line + reset  # red
        return line

    return "\n".join(colorize(line) for line in contents.split("\n"))
+
+
@mypyc_attr(patchable=True)
def dump_to_file(*output: str, ensure_final_newline: bool = True) -> str:
    """Dump `output` to a temporary file. Return path to the file."""
    with tempfile.NamedTemporaryFile(
        mode="w", prefix="blk_", suffix=".log", delete=False, encoding="utf8"
    ) as f:
        for chunk in output:
            f.write(chunk)
            # Keep each entry newline-terminated so concatenated logs stay readable.
            if ensure_final_newline and chunk and not chunk.endswith("\n"):
                f.write("\n")
    return f.name
diff --git a/py311/lib/python3.11/site-packages/black/parsing.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/parsing.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..d7c5d383a757930fb205677d7f74250571608f6d
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/parsing.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/parsing.py b/py311/lib/python3.11/site-packages/black/parsing.py
new file mode 100644
index 0000000000000000000000000000000000000000..46c6d1d8f701468803c56ec5c47dbec59994dca0
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/parsing.py
@@ -0,0 +1,260 @@
+"""
+Parse Python code and perform AST validation.
+"""
+
+import ast
+import sys
+import warnings
+from collections.abc import Collection, Iterator
+
+from black.mode import VERSION_TO_FEATURES, Feature, TargetVersion, supports_feature
+from black.nodes import syms
+from blib2to3 import pygram
+from blib2to3.pgen2 import driver
+from blib2to3.pgen2.grammar import Grammar
+from blib2to3.pgen2.parse import ParseError
+from blib2to3.pgen2.tokenize import TokenError
+from blib2to3.pytree import Leaf, Node
+
+
class InvalidInput(ValueError):
    """Raised when input source code fails all parse attempts.

    The message points at the offending line/column (see `lib2to3_parse`).
    """
+
+
def get_grammars(target_versions: set[TargetVersion]) -> list[Grammar]:
    """Return candidate grammars to try for `target_versions`, in priority order."""
    if not target_versions:
        # No target_version specified, so try all grammars.
        return [
            pygram.python_grammar_async_keywords,  # Python 3.7-3.9
            pygram.python_grammar,  # Python 3.0-3.6
            pygram.python_grammar_soft_keywords,  # Python 3.10+
        ]

    grammars: list[Grammar] = []
    # If we have to parse both, try to parse async as a keyword first
    if not supports_feature(
        target_versions, Feature.ASYNC_IDENTIFIERS
    ) and not supports_feature(target_versions, Feature.PATTERN_MATCHING):
        grammars.append(pygram.python_grammar_async_keywords)  # Python 3.7-3.9
    if not supports_feature(target_versions, Feature.ASYNC_KEYWORDS):
        grammars.append(pygram.python_grammar)  # Python 3.0-3.6
    if any(Feature.PATTERN_MATCHING in VERSION_TO_FEATURES[v] for v in target_versions):
        grammars.append(pygram.python_grammar_soft_keywords)  # Python 3.10+

    # At least one of the above branches must have been taken, because every Python
    # version has exactly one of the two 'ASYNC_*' flags
    return grammars
+
+
def lib2to3_parse(
    src_txt: str, target_versions: Collection[TargetVersion] = ()
) -> Node:
    """Given a string with source, return the lib2to3 Node.

    Tries each candidate grammar (see `get_grammars`) in turn and, when all of
    them fail, raises the `InvalidInput` built from the newest grammar version.
    """
    # Normalize to newline-terminated input before parsing.
    if not src_txt.endswith("\n"):
        src_txt += "\n"

    grammars = get_grammars(set(target_versions))
    # Mention the highest requested target version in error messages, if any.
    if target_versions:
        max_tv = max(target_versions, key=lambda tv: tv.value)
        tv_str = f" for target version {max_tv.pretty()}"
    else:
        tv_str = ""

    # One InvalidInput per attempted grammar version, for later reporting.
    errors = {}
    for grammar in grammars:
        drv = driver.Driver(grammar)
        try:
            result = drv.parse_string(src_txt, False)
            break

        except ParseError as pe:
            lineno, column = pe.context[1]
            lines = src_txt.splitlines()
            try:
                faulty_line = lines[lineno - 1]
            except IndexError:
                faulty_line = ""
            errors[grammar.version] = InvalidInput(
                f"Cannot parse{tv_str}: {lineno}:{column}: {faulty_line}"
            )

        except TokenError as te:
            # In edge cases these are raised; and typically don't have a "faulty_line".
            lineno, column = te.args[1]
            errors[grammar.version] = InvalidInput(
                f"Cannot parse{tv_str}: {lineno}:{column}: {te.args[0]}"
            )

    else:
        # for/else: no grammar succeeded (no break was hit).
        # Choose the latest version when raising the actual parsing error.
        assert len(errors) >= 1
        exc = errors[max(errors)]
        raise exc from None

    # Callers expect a Node; wrap a bare Leaf result accordingly.
    if isinstance(result, Leaf):
        result = Node(syms.file_input, [result])
    return result
+
+
def matches_grammar(src_txt: str, grammar: Grammar) -> bool:
    """Whether `src_txt` parses cleanly under `grammar`."""
    try:
        driver.Driver(grammar).parse_string(src_txt, False)
    except (ParseError, TokenError, IndentationError):
        return False
    return True
+
+
def lib2to3_unparse(node: Node) -> str:
    """Given a lib2to3 node, return its string representation."""
    return str(node)
+
+
class ASTSafetyError(Exception):
    """Raised when Black's generated code is not equivalent to the old AST."""
+
+
+def _parse_single_version(
+ src: str, version: tuple[int, int], *, type_comments: bool
+) -> ast.AST:
+ filename = ""
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", SyntaxWarning)
+ warnings.simplefilter("ignore", DeprecationWarning)
+ return ast.parse(
+ src, filename, feature_version=version, type_comments=type_comments
+ )
+
+
def parse_ast(src: str) -> ast.AST:
    """Parse `src`, trying newest feature versions first, then without type comments.

    Raises the first SyntaxError seen if every attempt fails.
    """
    # TODO: support Python 4+ ;)
    versions = [(3, minor) for minor in range(3, sys.version_info[1] + 1)]

    first_error = ""
    # First pass honors type comments; second pass drops them as a fallback.
    for type_comments in (True, False):
        for version in sorted(versions, reverse=True):
            try:
                return _parse_single_version(src, version, type_comments=type_comments)
            except SyntaxError as e:
                if type_comments and not first_error:
                    first_error = str(e)

    raise SyntaxError(first_error)
+
+
+def _normalize(lineend: str, value: str) -> str:
+ # To normalize, we strip any leading and trailing space from
+ # each line...
+ stripped: list[str] = [i.strip() for i in value.splitlines()]
+ normalized = lineend.join(stripped)
+ # ...and remove any blank lines at the beginning and end of
+ # the whole string
+ return normalized.strip()
+
+
def stringify_ast(node: ast.AST) -> Iterator[str]:
    """Simple visitor generating strings to compare ASTs by content."""
    # Kick off the recursion with an empty parent stack.
    return _stringify_ast(node, [])
+
+
def _stringify_ast_with_new_parent(
    node: ast.AST, parent_stack: list[ast.AST], new_parent: ast.AST
) -> Iterator[str]:
    """Yield `node`'s lines with `new_parent` pushed onto the shared parent stack.

    NOTE: the stack is mutated in place; the pop only runs once the nested
    generator has been exhausted.
    """
    parent_stack.append(new_parent)
    yield from _stringify_ast(node, parent_stack)
    parent_stack.pop()
+
+
def _stringify_ast(node: ast.AST, parent_stack: list[ast.AST]) -> Iterator[str]:
    """Yield one text line per AST node/field; indent depth = len(parent_stack).

    NOTE: may mutate `node` in place (clears the legacy `u` string kind).
    """
    if (
        isinstance(node, ast.Constant)
        and isinstance(node.value, str)
        and node.kind == "u"
    ):
        # It's a quirk of history that we strip the u prefix over here. We used to
        # rewrite the AST nodes for Python version compatibility and we never copied
        # over the kind
        node.kind = None

    yield f"{' ' * len(parent_stack)}{node.__class__.__name__}("

    for field in sorted(node._fields):
        # TypeIgnore has only one field 'lineno' which breaks this comparison
        if isinstance(node, ast.TypeIgnore):
            break

        try:
            value: object = getattr(node, field)
        except AttributeError:
            continue

        yield f"{' ' * (len(parent_stack) + 1)}{field}="

        if isinstance(value, list):
            for item in value:
                # Ignore nested tuples within del statements, because we may insert
                # parentheses and they change the AST.
                if (
                    field == "targets"
                    and isinstance(node, ast.Delete)
                    and isinstance(item, ast.Tuple)
                ):
                    for elt in _unwrap_tuples(item):
                        yield from _stringify_ast_with_new_parent(
                            elt, parent_stack, node
                        )

                elif isinstance(item, ast.AST):
                    yield from _stringify_ast_with_new_parent(item, parent_stack, node)

        elif isinstance(value, ast.AST):
            yield from _stringify_ast_with_new_parent(value, parent_stack, node)

        else:
            # Plain (non-AST) field value: normalize where formatting may differ.
            normalized: object
            if (
                isinstance(node, ast.Constant)
                and field == "value"
                and isinstance(value, str)
                and len(parent_stack) >= 2
                # Any standalone string, ideally this would
                # exactly match black.nodes.is_docstring
                and isinstance(parent_stack[-1], ast.Expr)
            ):
                # Constant strings may be indented across newlines, if they are
                # docstrings; fold spaces after newlines when comparing. Similarly,
                # trailing and leading space may be removed.
                normalized = _normalize("\n", value)
            elif field == "type_comment" and isinstance(value, str):
                # Trailing whitespace in type comments is removed.
                normalized = value.rstrip()
            else:
                normalized = value
            yield (
                f"{' ' * (len(parent_stack) + 1)}{normalized!r}, #"
                f" {value.__class__.__name__}"
            )

    yield f"{' ' * len(parent_stack)}) # /{node.__class__.__name__}"
+
+
+def _unwrap_tuples(node: ast.Tuple) -> Iterator[ast.AST]:
+ for elt in node.elts:
+ if isinstance(elt, ast.Tuple):
+ yield from _unwrap_tuples(elt)
+ else:
+ yield elt
diff --git a/py311/lib/python3.11/site-packages/black/py.typed b/py311/lib/python3.11/site-packages/black/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/black/ranges.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/ranges.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..d2b8e1858d2347f5ec2cbcd1cfe519ee927978d4
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/ranges.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/ranges.py b/py311/lib/python3.11/site-packages/black/ranges.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7e003db83f91c30cbc4f61bfc17cf812556c2f9
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/ranges.py
@@ -0,0 +1,534 @@
+"""Functions related to Black's formatting by line ranges feature."""
+
+import difflib
+from collections.abc import Collection, Iterator, Sequence
+from dataclasses import dataclass
+
+from black.nodes import (
+ LN,
+ STANDALONE_COMMENT,
+ Leaf,
+ Node,
+ Visitor,
+ first_leaf,
+ furthest_ancestor_with_last_leaf,
+ last_leaf,
+ syms,
+)
+from blib2to3.pgen2.token import ASYNC, NEWLINE
+
+
def parse_line_ranges(line_ranges: Sequence[str]) -> list[tuple[int, int]]:
    """Parse --line-ranges values like "1-5" into (start, end) tuples.

    Raises ValueError for malformed entries.
    """
    parsed: list[tuple[int, int]] = []
    for range_str in line_ranges:
        parts = range_str.split("-")
        if len(parts) != 2:
            raise ValueError(
                "Incorrect --line-ranges format, expect 'START-END', found"
                f" {range_str!r}"
            )
        try:
            parsed.append((int(parts[0]), int(parts[1])))
        except ValueError:
            raise ValueError(
                "Incorrect --line-ranges value, expect integer ranges, found"
                f" {range_str!r}"
            ) from None
    return parsed
+
+
def is_valid_line_range(lines: tuple[int, int]) -> bool:
    """Returns whether the line range is valid."""
    # An empty tuple is vacuously valid; otherwise start must not exceed end.
    if not lines:
        return True
    return lines[0] <= lines[1]
+
+
def sanitized_lines(
    lines: Collection[tuple[int, int]], src_contents: str
) -> Collection[tuple[int, int]]:
    """Returns the valid line ranges for the given source.

    This removes ranges that are entirely outside the valid lines.

    Other ranges are normalized so that the start values are at least 1 and the
    end values are at most the (1-based) index of the last source line.
    """
    if not src_contents:
        return []

    # A trailing newline does not open a new line; count accordingly.
    last_line = src_contents.count("\n")
    if not src_contents.endswith("\n"):
        last_line += 1

    sanitized = []
    for start, end in lines:
        if start > last_line:
            continue
        # line-ranges are 1-based
        clamped_start = max(start, 1)
        clamped_end = min(end, last_line)
        if clamped_end >= clamped_start:
            sanitized.append((clamped_start, clamped_end))
    return sanitized
+
+
def adjusted_lines(
    lines: Collection[tuple[int, int]],
    original_source: str,
    modified_source: str,
) -> list[tuple[int, int]]:
    """Returns the adjusted line ranges based on edits from the original code.

    This computes the new line ranges by diffing original_source and
    modified_source, and adjust each range based on how the range overlaps with
    the diffs.

    Note the diff can contain lines outside of the original line ranges. This can
    happen when the formatting has to be done in adjacent to maintain consistent
    local results. For example:

    1. def my_func(arg1, arg2,
    2.             arg3,):
    3.   pass

    If it restricts to line 2-2, it can't simply reformat line 2, it also has
    to reformat line 1:

    1. def my_func(
    2.     arg1,
    3.     arg2,
    4.     arg3,
    5. ):
    6.   pass

    In this case, we will expand the line ranges to also include the whole diff
    block.

    Args:
      lines: a collection of line ranges.
      original_source: the original source.
      modified_source: the modified source.

    Returns:
      The line ranges mapped onto the modified source.
    """
    lines_mappings = _calculate_lines_mappings(original_source, modified_source)

    new_lines = []
    # Keep an index of the current search. Since the lines and lines_mappings are
    # sorted, this makes the search complexity linear.
    current_mapping_index = 0
    for start, end in sorted(lines):
        start_mapping_index = _find_lines_mapping_index(
            start,
            lines_mappings,
            current_mapping_index,
        )
        end_mapping_index = _find_lines_mapping_index(
            end,
            lines_mappings,
            start_mapping_index,
        )
        current_mapping_index = start_mapping_index
        if start_mapping_index >= len(lines_mappings) or end_mapping_index >= len(
            lines_mappings
        ):
            # Protect against invalid inputs.
            continue
        start_mapping = lines_mappings[start_mapping_index]
        end_mapping = lines_mappings[end_mapping_index]
        if start_mapping.is_changed_block:
            # When the line falls into a changed block, expands to the whole block.
            new_start = start_mapping.modified_start
        else:
            new_start = (
                start - start_mapping.original_start + start_mapping.modified_start
            )
        if end_mapping.is_changed_block:
            # When the line falls into a changed block, expands to the whole block.
            new_end = end_mapping.modified_end
        else:
            new_end = end - end_mapping.original_start + end_mapping.modified_start
        # Drop ranges that became empty/inverted after adjustment.
        new_range = (new_start, new_end)
        if is_valid_line_range(new_range):
            new_lines.append(new_range)
    return new_lines
+
+
def convert_unchanged_lines(src_node: Node, lines: Collection[tuple[int, int]]) -> None:
    r"""Converts unchanged lines to STANDALONE_COMMENT.

    The idea is similar to how `# fmt: on/off` is implemented. It also converts the
    nodes between those markers as a single `STANDALONE_COMMENT` leaf node with
    the unformatted code as its value. `STANDALONE_COMMENT` is a "fake" token
    that will be formatted as-is with its prefix normalized.

    Here we perform two passes:

    1. Visit the top-level statements, and convert them to a single
       `STANDALONE_COMMENT` when unchanged. This speeds up formatting when some
       of the top-level statements aren't changed.
    2. Convert unchanged "unwrapped lines" to `STANDALONE_COMMENT` nodes line by
       line. "unwrapped lines" are divided by the `NEWLINE` token. e.g. a
       multi-line statement is *one* "unwrapped line" that ends with `NEWLINE`,
       even though this statement itself can span multiple lines, and the
       tokenizer only sees the last '\n' as the `NEWLINE` token.

    NOTE: During pass (2), comment prefixes and indentations are ALWAYS
    normalized even when the lines aren't changed. This is fixable by moving
    more formatting to pass (1). However, it's hard to get it correct when
    incorrect indentations are used. So we defer this to future optimizations.
    """
    # Flatten the (inclusive) ranges into a single set of line numbers.
    lines_set: set[int] = {
        line for start, end in lines for line in range(start, end + 1)
    }
    # Pass 1: collapse whole unchanged top-level statements.
    top_level_visitor = _TopLevelStatementsVisitor(lines_set)
    for _ in top_level_visitor.visit(src_node):  # drain the generator
        pass
    # Pass 2: collapse remaining unchanged logical lines one by one.
    _convert_unchanged_line_by_line(src_node, lines_set)
+
+
def _contains_standalone_comment(node: LN) -> bool:
    """Whether `node` or any descendant leaf is a STANDALONE_COMMENT."""
    if isinstance(node, Leaf):
        return node.type == STANDALONE_COMMENT
    return any(_contains_standalone_comment(child) for child in node.children)
+
+
class _TopLevelStatementsVisitor(Visitor[None]):
    """
    A node visitor that converts unchanged top-level statements to
    STANDALONE_COMMENT.

    This is used in addition to _convert_unchanged_line_by_line, to
    speed up formatting when there are unchanged top-level
    classes/functions/statements.
    """

    def __init__(self, lines_set: set[int]):
        # 1-based line numbers requested for formatting.
        self._lines_set = lines_set

    def visit_simple_stmt(self, node: Node) -> Iterator[None]:
        """Collapse an unchanged top-level simple statement."""
        # This is only called for top-level statements, since `visit_suite`
        # won't visit its children nodes.
        yield from []
        newline_leaf = last_leaf(node)
        if not newline_leaf:
            return
        assert (
            newline_leaf.type == NEWLINE
        ), f"Unexpectedly found leaf.type={newline_leaf.type}"
        # We need to find the furthest ancestor with the NEWLINE as the last
        # leaf, since a `suite` can simply be a `simple_stmt` when it puts
        # its body on the same line. Example: `if cond: pass`.
        ancestor = furthest_ancestor_with_last_leaf(newline_leaf)
        if not _get_line_range(ancestor).intersection(self._lines_set):
            _convert_node_to_standalone_comment(ancestor)

    def visit_suite(self, node: Node) -> Iterator[None]:
        """Collapse an unchanged top-level compound statement (its whole suite)."""
        yield from []
        # If there is a STANDALONE_COMMENT node, it means parts of the node tree
        # have fmt on/off/skip markers. Those STANDALONE_COMMENT nodes can't
        # be simply converted by calling str(node). So we just don't convert
        # here.
        if _contains_standalone_comment(node):
            return
        # Find the semantic parent of this suite. For `async_stmt` and
        # `async_funcdef`, the ASYNC token is defined on a separate level by the
        # grammar.
        semantic_parent = node.parent
        if semantic_parent is not None:
            if (
                semantic_parent.prev_sibling is not None
                and semantic_parent.prev_sibling.type == ASYNC
            ):
                semantic_parent = semantic_parent.parent
        if semantic_parent is not None and not _get_line_range(
            semantic_parent
        ).intersection(self._lines_set):
            _convert_node_to_standalone_comment(semantic_parent)
+
+
def _convert_unchanged_line_by_line(node: Node, lines_set: set[int]) -> None:
    """Converts unchanged to STANDALONE_COMMENT line by line.

    `lines_set` holds the (1-based) line numbers requested for formatting;
    logical lines that don't intersect it are collapsed.
    """
    for leaf in node.leaves():
        if leaf.type != NEWLINE:
            # We only consider "unwrapped lines", which are divided by the NEWLINE
            # token.
            continue
        if leaf.parent and leaf.parent.type == syms.match_stmt:
            # The `suite` node is defined as:
            #   match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT
            # Here we need to check `subject_expr`. The `case_block+` will be
            # checked by their own NEWLINEs.
            nodes_to_ignore: list[LN] = []
            prev_sibling = leaf.prev_sibling
            # Gather everything before the NEWLINE (match keyword + subject).
            while prev_sibling:
                nodes_to_ignore.insert(0, prev_sibling)
                prev_sibling = prev_sibling.prev_sibling
            if not _get_line_range(nodes_to_ignore).intersection(lines_set):
                _convert_nodes_to_standalone_comment(nodes_to_ignore, newline=leaf)
        elif leaf.parent and leaf.parent.type == syms.suite:
            # The `suite` node is defined as:
            #   suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
            # We will check `simple_stmt` and `stmt+` separately against the lines set
            parent_sibling = leaf.parent.prev_sibling
            nodes_to_ignore = []
            while parent_sibling and parent_sibling.type != syms.suite:
                # NOTE: Multiple suite nodes can exist as siblings in e.g. `if_stmt`.
                nodes_to_ignore.insert(0, parent_sibling)
                parent_sibling = parent_sibling.prev_sibling
            # Special case for `async_stmt` and `async_funcdef` where the ASYNC
            # token is on the grandparent node.
            grandparent = leaf.parent.parent
            if (
                grandparent is not None
                and grandparent.prev_sibling is not None
                and grandparent.prev_sibling.type == ASYNC
            ):
                nodes_to_ignore.insert(0, grandparent.prev_sibling)
            if not _get_line_range(nodes_to_ignore).intersection(lines_set):
                _convert_nodes_to_standalone_comment(nodes_to_ignore, newline=leaf)
        else:
            ancestor = furthest_ancestor_with_last_leaf(leaf)
            # Consider multiple decorators as a whole block, as their
            # newlines have different behaviors than the rest of the grammar.
            if (
                ancestor.type == syms.decorator
                and ancestor.parent
                and ancestor.parent.type == syms.decorators
            ):
                ancestor = ancestor.parent
            if not _get_line_range(ancestor).intersection(lines_set):
                _convert_node_to_standalone_comment(ancestor)
+
+
def _convert_node_to_standalone_comment(node: LN) -> None:
    """Convert node to STANDALONE_COMMENT by modifying the tree inline.

    The node (one the caller determined does not intersect the requested line
    ranges) is replaced in its parent by a single STANDALONE_COMMENT leaf
    carrying the node's original text, so it is emitted verbatim later.
    """
    parent = node.parent
    if not parent:
        return
    first = first_leaf(node)
    last = last_leaf(node)
    if not first or not last:
        return
    if first is last:
        # This can happen on the following edge cases:
        # 1. A block of `# fmt: off/on` code except the `# fmt: on` is placed
        #    on the end of the last line instead of on a new line.
        # 2. A single backslash on its own line followed by a comment line.
        # Ideally we don't want to format them when not requested, but fixing
        # isn't easy. These cases are also badly formatted code, so it isn't
        # too bad we reformat them.
        return
    # The prefix contains comments and indentation whitespaces. They are
    # reformatted accordingly to the correct indentation level.
    # This also means the indentation will be changed on the unchanged lines, and
    # this is actually required to not break incremental reformatting.
    prefix = first.prefix
    first.prefix = ""
    index = node.remove()
    if index is not None:
        # Because of the special handling of multiple decorators, if the decorated
        # item is a single line then there will be a missing newline between the
        # decorator and item, so add it back. This doesn't affect any other case
        # since a decorated item with a newline would hit the earlier suite case
        # in _convert_unchanged_line_by_line that correctly handles the newlines.
        if node.type == syms.decorated:
            # A leaf of type decorated wouldn't make sense, since it should always
            # have at least the decorator + the decorated item, so if this assert
            # hits that means there's a problem in the parser.
            assert isinstance(node, Node)
            # 1 will always be the correct index since before this function is
            # called all the decorators are collapsed into a single leaf
            node.insert_child(1, Leaf(NEWLINE, "\n"))
        # Remove the '\n', as STANDALONE_COMMENT will have '\n' appended when
        # generating the formatted code.
        value = str(node)[:-1]
        parent.insert_child(
            index,
            Leaf(
                STANDALONE_COMMENT,
                value,
                prefix=prefix,
                fmt_pass_converted_first_leaf=first,
            ),
        )
+
+
def _convert_nodes_to_standalone_comment(nodes: Sequence[LN], *, newline: Leaf) -> None:
    """Convert nodes to STANDALONE_COMMENT by modifying the tree inline.

    Args:
        nodes: consecutive siblings to be preserved verbatim; they are replaced
            in their common parent by one STANDALONE_COMMENT leaf whose value
            is the concatenation of their text.
        newline: the NEWLINE leaf terminating the statement; its prefix (the
            statement's trailing comment, if any) is folded into the generated
            value and then cleared.
    """
    if not nodes:
        return
    parent = nodes[0].parent
    first = first_leaf(nodes[0])
    if not parent or not first:
        return
    # The leading prefix (indentation and preceding comments) moves onto the
    # replacement leaf so it is re-emitted at the correct indentation level.
    prefix = first.prefix
    first.prefix = ""
    value = "".join(str(node) for node in nodes)
    # The prefix comment on the NEWLINE leaf is the trailing comment of the statement.
    if newline.prefix:
        value += newline.prefix
        newline.prefix = ""
    index = nodes[0].remove()
    for node in nodes[1:]:
        node.remove()
    if index is not None:
        parent.insert_child(
            index,
            Leaf(
                STANDALONE_COMMENT,
                value,
                prefix=prefix,
                fmt_pass_converted_first_leaf=first,
            ),
        )
+
+
def _leaf_line_end(leaf: Leaf) -> int:
    """Return the 1-based number of the last physical line occupied by `leaf`."""
    if leaf.type == NEWLINE:
        return leaf.lineno
    # A leaf such as a multiline string can span several physical lines;
    # count its embedded newlines to locate where it ends.
    return leaf.lineno + str(leaf).count("\n")
+
+
def _get_line_range(node_or_nodes: LN | list[LN]) -> set[int]:
    """Return the set of 1-based line numbers covered by a node or node list."""
    if isinstance(node_or_nodes, list):
        if not node_or_nodes:
            return set()
        start_leaf = first_leaf(node_or_nodes[0])
        end_leaf = last_leaf(node_or_nodes[-1])
        if not start_leaf or not end_leaf:
            return set()
        return set(range(start_leaf.lineno, _leaf_line_end(end_leaf) + 1))

    node = node_or_nodes
    if isinstance(node, Leaf):
        return set(range(node.lineno, _leaf_line_end(node) + 1))
    start_leaf = first_leaf(node)
    end_leaf = last_leaf(node)
    if not start_leaf or not end_leaf:
        return set()
    return set(range(start_leaf.lineno, _leaf_line_end(end_leaf) + 1))
+
+
@dataclass
class _LinesMapping:
    """1-based lines mapping from original source to modified source.

    Lines [original_start, original_end] from original source
    are mapped to [modified_start, modified_end].

    The ranges are inclusive on both ends.
    """

    # NOTE: a side of the mapping can be empty (pure insertion/deletion), in
    # which case that side's end is smaller than its start.
    original_start: int
    original_end: int
    modified_start: int
    modified_end: int
    # Whether this range corresponds to a changed block, or an unchanged block.
    is_changed_block: bool
+
+
def _calculate_lines_mappings(
    original_source: str,
    modified_source: str,
) -> Sequence[_LinesMapping]:
    """Diff the two sources and return line-range mappings between them.

    Each matching block reported by difflib becomes a mapping with
    is_changed_block=False, and each gap between two matching blocks becomes a
    mapping with is_changed_block=True. For example, collapsing a two-line
    signature `def func(arg1,\\n arg2, arg3):` into one line yields
    (1, 1)->(1, 1) unchanged, (2, 3)->(2, 2) changed, (4, 4)->(3, 3) unchanged.

    Args:
        original_source: the original source.
        modified_source: the modified source.
    """
    matcher = difflib.SequenceMatcher(
        None,
        original_source.splitlines(keepends=True),
        modified_source.splitlines(keepends=True),
    )
    same_blocks = matcher.get_matching_blocks()
    result: list[_LinesMapping] = []
    # same_blocks is 0-based; _LinesMapping is 1-based — hence the +1 offsets.
    last_index = len(same_blocks) - 1
    for i, current in enumerate(same_blocks):
        if i == 0:
            # Region before the very first matching block, if non-empty.
            if current.a != 0 or current.b != 0:
                result.append(
                    _LinesMapping(
                        original_start=1,
                        original_end=current.a,
                        modified_start=1,
                        modified_end=current.b,
                        is_changed_block=False,
                    )
                )
        else:
            # The gap between two consecutive matching blocks is the diff.
            prev = same_blocks[i - 1]
            result.append(
                _LinesMapping(
                    original_start=prev.a + prev.size + 1,
                    original_end=current.a,
                    modified_start=prev.b + prev.size + 1,
                    modified_end=current.b,
                    is_changed_block=True,
                )
            )
        if i != last_index:
            # The matching block itself maps unchanged lines. The final block
            # is difflib's zero-length sentinel, so it is skipped.
            result.append(
                _LinesMapping(
                    original_start=current.a + 1,
                    original_end=current.a + current.size,
                    modified_start=current.b + 1,
                    modified_end=current.b + current.size,
                    is_changed_block=False,
                )
            )
    return result
+
+
+def _find_lines_mapping_index(
+ original_line: int,
+ lines_mappings: Sequence[_LinesMapping],
+ start_index: int,
+) -> int:
+ """Returns the original index of the lines mappings for the original line."""
+ index = start_index
+ while index < len(lines_mappings):
+ mapping = lines_mappings[index]
+ if mapping.original_start <= original_line <= mapping.original_end:
+ return index
+ index += 1
+ return index
diff --git a/py311/lib/python3.11/site-packages/black/report.py b/py311/lib/python3.11/site-packages/black/report.py
new file mode 100644
index 0000000000000000000000000000000000000000..89899f2f38996f309f79dc1225e24bfa8c69767a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/report.py
@@ -0,0 +1,107 @@
+"""
+Summarize Black runs to users.
+"""
+
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+
+from click import style
+
+from black.output import err, out
+
+
class Changed(Enum):
    """Outcome of processing a single file."""

    NO = 0  # file was already well formatted (see Report.done)
    CACHED = 1  # file unchanged on disk since the last run (cache hit)
    YES = 2  # file was (or would be) reformatted


class NothingChanged(UserWarning):
    """Raised when reformatted code is the same as source."""
+
+
@dataclass
class Report:
    """Provides a reformatting counter. Can be rendered with `str(report)`."""

    check: bool = False
    diff: bool = False
    quiet: bool = False
    verbose: bool = False
    change_count: int = 0
    same_count: int = 0
    failure_count: int = 0

    def done(self, src: Path, changed: Changed) -> None:
        """Increment the counter for successful reformatting. Write out a message."""
        if changed is Changed.YES:
            # In dry-run modes nothing is written back, so phrase accordingly.
            dry_run = self.check or self.diff
            reformatted = "would reformat" if dry_run else "reformatted"
            if self.verbose or not self.quiet:
                out(f"{reformatted} {src}")
            self.change_count += 1
            return
        if self.verbose:
            if changed is Changed.NO:
                msg = f"{src} already well formatted, good job."
            else:
                msg = f"{src} wasn't modified on disk since last run."
            out(msg, bold=False)
        self.same_count += 1

    def failed(self, src: Path, message: str) -> None:
        """Increment the counter for failed reformatting. Write out a message."""
        err(f"error: cannot format {src}: {message}")
        self.failure_count += 1

    def path_ignored(self, path: Path, message: str) -> None:
        """Report (in verbose mode only) that `path` was skipped."""
        if self.verbose:
            out(f"{path} ignored: {message}", bold=False)

    @property
    def return_code(self) -> int:
        """Return the exit code that the app should use.

        This considers the current state of changed files and failures:
        - if there were any failures, return 123;
        - if any files were changed and --check is being used, return 1;
        - otherwise return 0.
        """
        # According to http://tldp.org/LDP/abs/html/exitcodes.html starting with
        # 126 we have special return codes reserved by the shell.
        if self.failure_count:
            return 123
        if self.change_count and self.check:
            return 1
        return 0

    def __str__(self) -> str:
        """Render a color report of the current state.

        Use `click.unstyle` to remove colors.
        """
        if self.check or self.diff:
            reformatted = "would be reformatted"
            unchanged = "would be left unchanged"
            failed = "would fail to reformat"
        else:
            reformatted = "reformatted"
            unchanged = "left unchanged"
            failed = "failed to reformat"
        parts: list[str] = []
        if self.change_count:
            s = "s" if self.change_count > 1 else ""
            parts.append(
                style(f"{self.change_count} file{s} ", bold=True, fg="blue")
                + style(f"{reformatted}", bold=True)
            )
        if self.same_count:
            s = "s" if self.same_count > 1 else ""
            parts.append(style(f"{self.same_count} file{s} ", fg="blue") + unchanged)
        if self.failure_count:
            s = "s" if self.failure_count > 1 else ""
            parts.append(style(f"{self.failure_count} file{s} {failed}", fg="red"))
        return ", ".join(parts) + "."
diff --git a/py311/lib/python3.11/site-packages/black/resources/__init__.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/resources/__init__.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..b8417ed89fe0bfecdb8ea0c651622faee23e4fe2
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/resources/__init__.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/resources/__init__.py b/py311/lib/python3.11/site-packages/black/resources/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/py311/lib/python3.11/site-packages/black/resources/black.schema.json b/py311/lib/python3.11/site-packages/black/resources/black.schema.json
new file mode 100644
index 0000000000000000000000000000000000000000..bed70a4bb22cad8797c8d1735ce575630ad8c3e4
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/resources/black.schema.json
@@ -0,0 +1,161 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "https://github.com/psf/black/blob/main/src/black/resources/black.schema.json",
+ "$comment": "tool.black table in pyproject.toml",
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "code": {
+ "type": "string",
+ "description": "Format the code passed in as a string."
+ },
+ "line-length": {
+ "type": "integer",
+ "description": "How many characters per line to allow.",
+ "default": 88
+ },
+ "target-version": {
+ "type": "array",
+ "items": {
+ "enum": [
+ "py33",
+ "py34",
+ "py35",
+ "py36",
+ "py37",
+ "py38",
+ "py39",
+ "py310",
+ "py311",
+ "py312",
+ "py313",
+ "py314"
+ ]
+ },
+ "description": "Python versions that should be supported by Black's output. You should include all versions that your code supports. By default, Black will infer target versions from the project metadata in pyproject.toml. If this does not yield conclusive results, Black will use per-file auto-detection."
+ },
+ "pyi": {
+ "type": "boolean",
+ "description": "Format all input files like typing stubs regardless of file extension. This is useful when piping source on standard input.",
+ "default": false
+ },
+ "ipynb": {
+ "type": "boolean",
+ "description": "Format all input files like Jupyter Notebooks regardless of file extension. This is useful when piping source on standard input.",
+ "default": false
+ },
+ "python-cell-magics": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "When processing Jupyter Notebooks, add the given magic to the list of known python-magics (capture, prun, pypy, python, python3, time, timeit). Useful for formatting cells with custom python magics."
+ },
+ "skip-source-first-line": {
+ "type": "boolean",
+ "description": "Skip the first line of the source code.",
+ "default": false
+ },
+ "skip-string-normalization": {
+ "type": "boolean",
+ "description": "Don't normalize string quotes or prefixes.",
+ "default": false
+ },
+ "skip-magic-trailing-comma": {
+ "type": "boolean",
+ "description": "Don't use trailing commas as a reason to split lines.",
+ "default": false
+ },
+ "preview": {
+ "type": "boolean",
+ "description": "Enable potentially disruptive style changes that may be added to Black's main functionality in the next major release.",
+ "default": false
+ },
+ "unstable": {
+ "type": "boolean",
+ "description": "Enable potentially disruptive style changes that have known bugs or are not currently expected to make it into the stable style Black's next major release. Implies --preview.",
+ "default": false
+ },
+ "enable-unstable-feature": {
+ "type": "array",
+ "items": {
+ "enum": [
+ "string_processing",
+ "hug_parens_with_braces_and_square_brackets",
+ "wrap_long_dict_values_in_parens",
+ "multiline_string_handling",
+ "always_one_newline_after_import",
+ "fix_fmt_skip_in_one_liners",
+ "standardize_type_comments",
+ "wrap_comprehension_in",
+ "remove_parens_around_except_types",
+ "normalize_cr_newlines",
+ "fix_module_docstring_detection",
+ "fix_type_expansion_split",
+ "remove_parens_from_assignment_lhs"
+ ]
+ },
+ "description": "Enable specific features included in the `--unstable` style. Requires `--preview`. No compatibility guarantees are provided on the behavior or existence of any unstable features."
+ },
+ "check": {
+ "type": "boolean",
+ "description": "Don't write the files back, just return the status. Return code 0 means nothing would change. Return code 1 means some files would be reformatted. Return code 123 means there was an internal error.",
+ "default": false
+ },
+ "diff": {
+ "type": "boolean",
+ "description": "Don't write the files back, just output a diff to indicate what changes Black would've made. They are printed to stdout so capturing them is simple.",
+ "default": false
+ },
+ "color": {
+ "type": "boolean",
+ "description": "Show (or do not show) colored diff. Only applies when --diff is given.",
+ "default": false
+ },
+ "fast": {
+ "type": "boolean",
+ "description": "By default, Black performs an AST safety check after formatting your code. The --fast flag turns off this check and the --safe flag explicitly enables it. [default: --safe]",
+ "default": false
+ },
+ "required-version": {
+ "type": "string",
+ "description": "Require a specific version of Black to be running. This is useful for ensuring that all contributors to your project are using the same version, because different versions of Black may format code a little differently. This option can be set in a configuration file for consistent results across environments."
+ },
+ "exclude": {
+ "type": "string",
+ "description": "A regular expression that matches files and directories that should be excluded on recursive searches. An empty value means no paths are excluded. Use forward slashes for directories on all platforms (Windows, too). By default, Black also ignores all paths listed in .gitignore. Changing this value will override all default exclusions. [default: /(\\.direnv|\\.eggs|\\.git|\\.hg|\\.ipynb_checkpoints|\\.mypy_cache|\\.nox|\\.pytest_cache|\\.ruff_cache|\\.tox|\\.svn|\\.venv|\\.vscode|__pypackages__|_build|buck-out|build|dist|venv)/]"
+ },
+ "extend-exclude": {
+ "type": "string",
+ "description": "Like --exclude, but adds additional files and directories on top of the default values instead of overriding them."
+ },
+ "force-exclude": {
+ "type": "string",
+ "description": "Like --exclude, but files and directories matching this regex will be excluded even when they are passed explicitly as arguments. This is useful when invoking Black programmatically on changed files, such as in a pre-commit hook or editor plugin."
+ },
+ "include": {
+ "type": "string",
+ "description": "A regular expression that matches files and directories that should be included on recursive searches. An empty value means all files are included regardless of the name. Use forward slashes for directories on all platforms (Windows, too). Overrides all exclusions, including from .gitignore and command line options.",
+ "default": "(\\.pyi?|\\.ipynb)$"
+ },
+ "workers": {
+ "type": "integer",
+ "description": "When Black formats multiple files, it may use a process pool to speed up formatting. This option controls the number of parallel workers. This can also be specified via the BLACK_NUM_WORKERS environment variable. Defaults to the number of CPUs in the system."
+ },
+ "quiet": {
+ "type": "boolean",
+      "description": "Stop emitting all non-critical output. Error messages will still be emitted (which can be silenced by 2>/dev/null).",
+ "default": false
+ },
+ "verbose": {
+ "type": "boolean",
+ "description": "Emit messages about files that were not changed or were ignored due to exclusion patterns. If Black is using a configuration file, a message detailing which one it is using will be emitted.",
+ "default": false
+ },
+ "no-cache": {
+ "type": "boolean",
+ "description": "Skip reading and writing the cache, forcing Black to reformat all included files.",
+ "default": false
+ }
+ }
+}
diff --git a/py311/lib/python3.11/site-packages/black/rusty.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/rusty.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..a4428b5a869d16a4fbd7253de5d3c1fa2315e05a
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/rusty.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/rusty.py b/py311/lib/python3.11/site-packages/black/rusty.py
new file mode 100644
index 0000000000000000000000000000000000000000..ebd4c052d1f37b9eed01d329143c15bfbe0f0e74
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/rusty.py
@@ -0,0 +1,28 @@
"""An error-handling model influenced by that used by the Rust programming language

See https://doc.rust-lang.org/book/ch09-00-error-handling.html.
"""

from typing import Generic, TypeVar, Union

T = TypeVar("T")
E = TypeVar("E", bound=Exception)


class Ok(Generic[T]):
    """Wraps the value produced by a successful operation."""

    def __init__(self, value: T) -> None:
        self._value = value

    def ok(self) -> T:
        """Unwrap and return the success value."""
        return self._value


class Err(Generic[E]):
    """Wraps the exception describing a failed operation."""

    def __init__(self, e: E) -> None:
        self._e = e

    def err(self) -> E:
        """Unwrap and return the wrapped exception."""
        return self._e


# A Result is either a success (Ok) or a failure (Err).
Result = Union[Ok[T], Err[E]]
diff --git a/py311/lib/python3.11/site-packages/black/schema.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/schema.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..fd9630b10702bc0e06720f122104723df8649a00
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/schema.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/schema.py b/py311/lib/python3.11/site-packages/black/schema.py
new file mode 100644
index 0000000000000000000000000000000000000000..f534dbb028d579cfaf34efda8c75bdf7e584c1e4
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/schema.py
@@ -0,0 +1,15 @@
import importlib.resources
import json
from typing import Any


def get_schema(tool_name: str = "black") -> Any:
    """Get the stored complete schema for black's settings.

    Loads `black.schema.json` from the `black.resources` package data.
    """
    assert tool_name == "black", "Only black is supported."

    resource = importlib.resources.files("black.resources").joinpath(
        "black.schema.json"
    )
    with resource.open(encoding="utf-8") as f:
        return json.load(f)
diff --git a/py311/lib/python3.11/site-packages/black/strings.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/strings.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..ac6bd9753dce56473d8806c0f4a4bef0a4bcdeab
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/strings.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/strings.py b/py311/lib/python3.11/site-packages/black/strings.py
new file mode 100644
index 0000000000000000000000000000000000000000..78c9f258fe2f770cd10fd1fdc24b0b104ab662ee
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/strings.py
@@ -0,0 +1,389 @@
+"""
+Simple formatting on strings. Further string formatting code is in trans.py.
+"""
+
+import re
+import sys
+from functools import lru_cache
+from re import Match, Pattern
+from typing import Final
+
+from black._width_table import WIDTH_TABLE
+from blib2to3.pytree import Leaf
+
STRING_PREFIX_CHARS: Final = "fturbFTURB"  # All possible string prefix characters.
STRING_PREFIX_RE: Final = re.compile(
    r"^([" + STRING_PREFIX_CHARS + r"]*)(.*)$", re.DOTALL
)
# Matches a backslash escape sequence. The named groups are consumed by
# normalize_unicode_escape_sequences() below: "backslashes" (the run of
# backslashes before the escape), "body" (the whole escape body), and one of
# "u"/"U"/"x"/"N" with the hex digits or the character name.
UNICODE_ESCAPE_RE: Final = re.compile(
    r"(?P<backslashes>\\+)(?P<body>"
    r"(u(?P<u>[a-fA-F0-9]{4}))"  # Character with 16-bit hex value xxxx
    r"|(U(?P<U>[a-fA-F0-9]{8}))"  # Character with 32-bit hex value xxxxxxxx
    r"|(x(?P<x>[a-fA-F0-9]{2}))"  # Character with hex value hh
    r"|(N\{(?P<N>[a-zA-Z0-9 \-]{2,})\})"  # Character named name in the Unicode database
    r")",
    re.VERBOSE,
)
+
+
def sub_twice(regex: Pattern[str], replacement: str, original: str) -> str:
    """Apply the `regex` -> `replacement` substitution twice to `original`.

    String normalization uses the double pass so that matches overlapping the
    output of the first pass are also rewritten.
    """
    once = regex.sub(replacement, original)
    return regex.sub(replacement, once)
+
+
def has_triple_quotes(string: str) -> bool:
    """
    Returns:
        True iff @string starts with three quotation characters.
    """
    # Skip any string prefix (r, b, f, u in either case) before checking.
    body = string.lstrip(STRING_PREFIX_CHARS)
    return body.startswith(('"""', "'''"))
+
+
def lines_with_leading_tabs_expanded(s: str) -> list[str]:
    """Split `s` into lines, expanding tabs only in each line's leading
    whitespace (following the normal Python indentation rules)."""
    result: list[str] = []
    for raw in s.splitlines():
        body = raw.lstrip()
        if body and body != raw:
            # Expand tabs in the leading-whitespace prefix only; the rest of
            # the line is kept verbatim.
            indent = raw[: len(raw) - len(body)]
            result.append(indent.expandtabs() + body)
        else:
            # Blank line, or no leading whitespace at all: keep as-is.
            result.append(raw)
    if s.endswith("\n"):
        # splitlines() drops a trailing newline; represent it as an empty line.
        result.append("")
    return result
+
+
def fix_multiline_docstring(docstring: str, prefix: str) -> str:
    """Dedent a multiline docstring and re-indent it using `prefix`.

    Follows the PEP 257 trimming algorithm: the smallest indentation common to
    all lines after the first is stripped, then `prefix` is prepended to each
    remaining line (interior blank lines stay completely empty).
    """
    # https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation
    assert docstring, "INTERNAL ERROR: Multiline docstrings cannot be empty"
    lines = lines_with_leading_tabs_expanded(docstring)
    # Determine minimum indentation (first line doesn't count):
    indent = sys.maxsize
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            indent = min(indent, len(line) - len(stripped))
    # Remove indentation (first line is special):
    trimmed = [lines[0].strip()]
    if indent < sys.maxsize:
        # Index of the final line within the enumerate() below: i runs over
        # lines[1:], so its last value is len(lines) - 2.
        last_line_idx = len(lines) - 2
        for i, line in enumerate(lines[1:]):
            stripped_line = line[indent:].rstrip()
            if stripped_line or i == last_line_idx:
                # Keep the last line even when blank, so its indentation
                # (typically carrying the closing quotes) is preserved.
                trimmed.append(prefix + stripped_line)
            else:
                trimmed.append("")
    return "\n".join(trimmed)
+
+
def get_string_prefix(string: str) -> str:
    """Return @string's prefix (e.g. '', 'r', 'f', or 'rf').

    Pre-conditions:
        * assert_is_leaf_string(@string)
    """
    assert_is_leaf_string(string)

    # The prefix is the leading run of prefix characters before the quote.
    end = 0
    while end < len(string) and string[end] in STRING_PREFIX_CHARS:
        end += 1
    return string[:end]
+
+
def assert_is_leaf_string(string: str) -> None:
    """
    Checks the pre-condition that @string has the format that you would expect
    of `leaf.value` where `leaf` is some Leaf such that `leaf.type ==
    token.STRING`. A more precise description of the pre-conditions that are
    checked are listed below.

    Pre-conditions:
        * @string starts with either <prefix>', <prefix>", ', or " where
          `set(<prefix>)` is some subset of `set(STRING_PREFIX_CHARS)`.
        * @string ends with a quote character (' or ").

    Raises:
        AssertionError(...) if the pre-conditions listed above are not
        satisfied.
    """
    dquote_idx = string.find('"')
    squote_idx = string.find("'")
    if -1 in [dquote_idx, squote_idx]:
        # One quote type is absent: max() picks the index of the one that was
        # found (or stays -1 when neither exists).
        quote_idx = max(dquote_idx, squote_idx)
    else:
        # Both quote types appear: the opening quote is the earlier one.
        quote_idx = min(squote_idx, dquote_idx)

    assert (
        0 <= quote_idx < len(string) - 1
    ), f"{string!r} is missing a starting quote character (' or \")."
    assert string[-1] in (
        "'",
        '"',
    ), f"{string!r} is missing an ending quote character (' or \")."
    assert set(string[:quote_idx]).issubset(
        set(STRING_PREFIX_CHARS)
    ), f"{set(string[:quote_idx])} is NOT a subset of {set(STRING_PREFIX_CHARS)}."
+
+
def normalize_string_prefix(s: str) -> str:
    """Make all string prefixes lowercase."""
    match = STRING_PREFIX_RE.match(s)
    assert match is not None, f"failed to match string {s!r}"
    orig_prefix = match.group(1)
    # Lowercase f/b; drop the legacy u/U prefix entirely (it is a no-op).
    translation = {"F": "f", "B": "b", "U": "", "u": ""}
    new_prefix = "".join(translation.get(char, char) for char in orig_prefix)

    # Python syntax guarantees max 2 prefixes and that one of them is "r"
    if len(new_prefix) == 2 and new_prefix[0].lower() != "r":
        new_prefix = new_prefix[::-1]
    return f"{new_prefix}{match.group(2)}"
+
+
+# Re(gex) does actually cache patterns internally but this still improves
+# performance on a long list literal of strings by 5-9% since lru_cache's
+# caching overhead is much lower.
+@lru_cache(maxsize=64)
+def _cached_compile(pattern: str) -> Pattern[str]:
+ return re.compile(pattern)
+
+
def normalize_string_quotes(s: str) -> str:
    """Prefer double quotes but only if it doesn't cause more escaping.

    Adds or removes backslashes as appropriate.
    """
    value = s.lstrip(STRING_PREFIX_CHARS)
    if value[:3] == '"""':
        return s

    elif value[:3] == "'''":
        orig_quote = "'''"
        new_quote = '"""'
    elif value[0] == '"':
        orig_quote = '"'
        new_quote = "'"
    else:
        orig_quote = "'"
        new_quote = '"'
    first_quote_pos = s.find(orig_quote)
    assert first_quote_pos != -1, f"INTERNAL ERROR: Malformed string {s!r}"

    prefix = s[:first_quote_pos]
    unescaped_new_quote = _cached_compile(rf"(([^\\]|^)(\\\\)*){new_quote}")
    escaped_new_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){new_quote}")
    escaped_orig_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){orig_quote}")
    body = s[first_quote_pos + len(orig_quote) : -len(orig_quote)]
    if "r" in prefix.casefold():
        if unescaped_new_quote.search(body):
            # There's at least one unescaped new_quote in this raw string
            # so converting is impossible
            return s

        # Do not introduce or remove backslashes in raw strings
        new_body = body
    else:
        # remove unnecessary escapes
        new_body = sub_twice(escaped_new_quote, rf"\1\2{new_quote}", body)
        if body != new_body:
            # Consider the string without unnecessary escapes as the original
            body = new_body
            s = f"{prefix}{orig_quote}{body}{orig_quote}"
        new_body = sub_twice(escaped_orig_quote, rf"\1\2{orig_quote}", new_body)
        new_body = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_body)

    if "f" in prefix.casefold():
        # Find the contents of replacement fields ({...}) while skipping
        # literal doubled braces ({{ and }}).
        matches = re.findall(
            r"""
            (?:(?<!\{)\{)  # start of the string or a non-{ followed by a single {
                ([^{].*?)  # contents of the brackets except if begins with {{
            \}(?!\})  # A } followed by end of the string or a non-}
            """,
            new_body,
            re.VERBOSE,
        )
        for m in matches:
            if "\\" in str(m):
                # Do not introduce backslashes in interpolated expressions. That will
                # change the perceived behavior of the string value returned by the
                # underlying function.
                return s

    if new_quote == '"""' and new_body[-1:] == '"':
        # edge case: a trailing quote would run into the closing triple quote,
        # so it must be escaped.
        new_body = new_body[:-1] + '\\"'
    orig_escape_count = body.count("\\")
    new_escape_count = new_body.count("\\")
    if new_escape_count > orig_escape_count:
        return s  # Do not introduce more escaping

    if new_escape_count == orig_escape_count and orig_quote == '"':
        return s  # Prefer double quotes

    return f"{prefix}{new_quote}{new_body}{new_quote}"
+
+
def normalize_fstring_quotes(
    quote: str,
    middles: list[Leaf],
    is_raw_fstring: bool,
) -> tuple[list[Leaf], str]:
    """Prefer double quotes but only if it doesn't cause more escaping.

    Adds or removes backslashes as appropriate.

    Args:
        quote: the f-string's current quote sequence.
        middles: leaves holding the literal text segments of the f-string;
            their `.value` may be rewritten in place (presumably the text
            between replacement fields — confirm at the call site).
        is_raw_fstring: whether the literal carries an `r` prefix.

    Returns:
        The (possibly mutated) middles and the quote sequence to use.
    """
    if quote == '"""':
        return middles, quote

    elif quote == "'''":
        new_quote = '"""'
    elif quote == '"':
        new_quote = "'"
    else:
        new_quote = '"'

    unescaped_new_quote = _cached_compile(rf"(([^\\]|^)(\\\\)*){new_quote}")
    escaped_new_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){new_quote}")
    escaped_orig_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){quote}")
    if is_raw_fstring:
        for middle in middles:
            if unescaped_new_quote.search(middle.value):
                # There's at least one unescaped new_quote in this raw string
                # so converting is impossible
                return middles, quote

        # Do not introduce or remove backslashes in raw strings, just use double quote
        return middles, '"'

    # First pass: compute candidate rewritten segments without committing
    # them, so the rewrite can still be abandoned below. Removing unnecessary
    # escapes, however, is committed to `middle.value` immediately.
    new_segments = []
    for middle in middles:
        segment = middle.value
        # remove unnecessary escapes
        new_segment = sub_twice(escaped_new_quote, rf"\1\2{new_quote}", segment)
        if segment != new_segment:
            # Consider the string without unnecessary escapes as the original
            middle.value = new_segment

        new_segment = sub_twice(escaped_orig_quote, rf"\1\2{quote}", new_segment)
        new_segment = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_segment)
        new_segments.append(new_segment)

    if new_quote == '"""' and new_segments[-1].endswith('"'):
        # edge case: a trailing quote would run into the closing triple quote.
        new_segments[-1] = new_segments[-1][:-1] + '\\"'

    # Dry run: bail out (keeping the originals) if switching quotes would add
    # escapes, or would be pointless on an already double-quoted string.
    # strict=True is safe: new_segments was built 1:1 from middles above.
    for middle, new_segment in zip(middles, new_segments, strict=True):
        orig_escape_count = middle.value.count("\\")
        new_escape_count = new_segment.count("\\")

        if new_escape_count > orig_escape_count:
            return middles, quote  # Do not introduce more escaping

        if new_escape_count == orig_escape_count and quote == '"':
            return middles, quote  # Prefer double quotes

    # Commit: overwrite the middle leaves with the rewritten segments.
    for middle, new_segment in zip(middles, new_segments, strict=True):
        middle.value = new_segment

    return middles, new_quote
+
+
def normalize_unicode_escape_sequences(leaf: Leaf) -> None:
    """Replace hex codes in Unicode escape sequences with lowercase representation."""
    text = leaf.value
    if "r" in get_string_prefix(text).lower():
        # Raw strings contain no escape sequences; leave them untouched.
        return

    def _normalize(match: Match[str]) -> str:
        parts = match.groupdict()
        slashes = parts["backslashes"]

        if len(slashes) % 2 == 0:
            # An even number of backslashes means the "escape" is itself
            # escaped: the body is literal text and must not be rewritten.
            return slashes + parts["body"]

        # \uxxxx, \Uxxxxxxxx and \xhh hex digits are lowercased.
        for marker in ("u", "U", "x"):
            if parts[marker]:
                return slashes + marker + parts[marker].lower()

        # \N{...} character names are uppercased instead.
        assert parts["N"], f"Unexpected match: {match}"
        return slashes + "N{" + parts["N"].upper() + "}"

    leaf.value = re.sub(UNICODE_ESCAPE_RE, _normalize, text)
+
+
@lru_cache(maxsize=4096)
def char_width(char: str) -> int:
    """Return the width of a single character as it would be displayed in a
    terminal or editor (which respects Unicode East Asian Width).

    Full width characters are counted as 2, while half width characters are
    counted as 1. Also control characters are counted as 0.
    """
    codepoint = ord(char)
    lo = 0
    hi = len(WIDTH_TABLE) - 1
    # Binary search over the sorted (start, end, width) codepoint ranges.
    while lo <= hi:
        mid = (lo + hi) // 2
        start_codepoint, end_codepoint, width = WIDTH_TABLE[mid]
        if codepoint < start_codepoint:
            hi = mid - 1
        elif codepoint > end_codepoint:
            lo = mid + 1
        else:
            # A negative table width marks a zero-width (control) character.
            return max(width, 0)
    # Codepoint not covered by the table: default to half width.
    return 1
+
+
def str_width(line_str: str) -> int:
    """Return the width of `line_str` as it would be displayed in a terminal
    or editor (which respects Unicode East Asian Width).

    You could utilize this function to determine, for example, if a string
    is too wide to display in a terminal or editor.
    """
    if line_str.isascii():
        # Fast path: every ASCII character is exactly one column wide.
        return len(line_str)
    return sum(char_width(char) for char in line_str)
+
+
def count_chars_in_width(line_str: str, max_width: int) -> int:
    """Count how many leading characters of `line_str` fit within `max_width`
    display columns in a terminal or editor (which respects Unicode East
    Asian Width)."""
    used = 0
    for count, char in enumerate(line_str):
        width = char_width(char)
        if used + width > max_width:
            # This character would overflow; everything before it fits.
            return count
        used += width
    return len(line_str)
diff --git a/py311/lib/python3.11/site-packages/black/trans.cpython-311-x86_64-linux-gnu.so b/py311/lib/python3.11/site-packages/black/trans.cpython-311-x86_64-linux-gnu.so
new file mode 100644
index 0000000000000000000000000000000000000000..87c6cdd1b8302ba0ba13915916f9cc4cda0a2d28
Binary files /dev/null and b/py311/lib/python3.11/site-packages/black/trans.cpython-311-x86_64-linux-gnu.so differ
diff --git a/py311/lib/python3.11/site-packages/black/trans.py b/py311/lib/python3.11/site-packages/black/trans.py
new file mode 100644
index 0000000000000000000000000000000000000000..0cb6f6270c8de357005228a66efc238025da2717
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/black/trans.py
@@ -0,0 +1,2511 @@
+"""
+String transformers that can split and merge strings.
+"""
+
+import re
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from collections.abc import Callable, Collection, Iterable, Iterator, Sequence
+from dataclasses import dataclass
+from typing import Any, ClassVar, Final, Literal, TypeVar, Union
+
+from mypy_extensions import trait
+
+from black.comments import contains_pragma_comment
+from black.lines import Line, append_leaves
+from black.mode import Feature, Mode
+from black.nodes import (
+ CLOSING_BRACKETS,
+ OPENING_BRACKETS,
+ STANDALONE_COMMENT,
+ is_empty_lpar,
+ is_empty_par,
+ is_empty_rpar,
+ is_part_of_annotation,
+ parent_type,
+ replace_child,
+ syms,
+)
+from black.rusty import Err, Ok, Result
+from black.strings import (
+ assert_is_leaf_string,
+ count_chars_in_width,
+ get_string_prefix,
+ has_triple_quotes,
+ normalize_string_quotes,
+ str_width,
+)
+from blib2to3.pgen2 import token
+from blib2to3.pytree import Leaf, Node
+
+
class CannotTransform(Exception):
    """Base class for errors raised by Transformers.

    Raised (or wrapped in an `Err`) when a transformer cannot, or should
    not, rewrite the given line.
    """
+
+
# types
T = TypeVar("T")
LN = Union[Leaf, Node]  # any node of the blib2to3 concrete syntax tree
Transformer = Callable[[Line, Collection[Feature], Mode], Iterator[Line]]
Index = int  # a position within Line.leaves
NodeType = int
ParserState = int
StringID = int  # id() of a string object (see CustomSplitMapMixin)
TResult = Result[T, CannotTransform]  # (T)ransform Result
TMatchResult = TResult[list[Index]]

# Characters after which a line may safely be split without changing meaning.
SPLIT_SAFE_CHARS = frozenset(["\u3001", "\u3002", "\uff0c"])  # East Asian stops
+
+
def TErr(err_msg: str) -> Err[CannotTransform]:
    """(T)ransform Err

    Shorthand for building the error branch of a TResult: wraps @err_msg
    in a CannotTransform and that in an Err.
    """
    return Err(CannotTransform(err_msg))
+
+
def hug_power_op(
    line: Line, features: Collection[Feature], mode: Mode
) -> Iterator[Line]:
    """A transformer which normalizes spacing around power operators.

    Yields a single new line in which `**` "hugs" its operands (empty
    whitespace prefix on the operator and its right operand) whenever both
    operands are "simple" (plain/dotted names or numbers, optionally with a
    unary prefix).

    Raises:
        CannotTransform: if the line contains no `**` operator.
    """

    # Performance optimization to avoid unnecessary Leaf clones and other ops.
    for leaf in line.leaves:
        if leaf.type == token.DOUBLESTAR:
            break
    else:
        raise CannotTransform("No doublestar token was found in the line.")

    def is_simple_lookup(index: int, kind: Literal[1, -1]) -> bool:
        # Brackets and parentheses indicate calls, subscripts, etc. ...
        # basically stuff that doesn't count as "simple". Only a NAME lookup
        # or dotted lookup (eg. NAME.NAME) is OK.
        if kind == -1:
            # Scanning backwards from the base operand.
            return handle_is_simple_look_up_prev(line, index, {token.RPAR, token.RSQB})
        else:
            # Scanning forwards from the exponent operand.
            return handle_is_simple_lookup_forward(
                line, index, {token.LPAR, token.LSQB}
            )

    def is_simple_operand(index: int, kind: Literal[1, -1]) -> bool:
        # An operand is considered "simple" if it's a NAME, a numeric CONSTANT,
        # a simple lookup (see above), with or without a preceding unary operator.
        start = line.leaves[index]
        if start.type in {token.NAME, token.NUMBER}:
            return is_simple_lookup(index, kind)

        if start.type in {token.PLUS, token.MINUS, token.TILDE}:
            if line.leaves[index + 1].type in {token.NAME, token.NUMBER}:
                # kind is always one as bases with a preceding unary op will be checked
                # for simplicity starting from the next token (so it'll hit the check
                # above).
                return is_simple_lookup(index + 1, kind=1)

        return False

    new_line = line.clone()
    should_hug = False
    for idx, leaf in enumerate(line.leaves):
        new_leaf = leaf.clone()
        if should_hug:
            # This leaf is the right operand of a hugged `**`: drop its
            # whitespace prefix.
            new_leaf.prefix = ""
            should_hug = False

        should_hug = (
            (0 < idx < len(line.leaves) - 1)
            and leaf.type == token.DOUBLESTAR
            and is_simple_operand(idx - 1, kind=-1)
            and line.leaves[idx - 1].value != "lambda"
            and is_simple_operand(idx + 1, kind=1)
        )
        if should_hug:
            new_leaf.prefix = ""

        # We have to be careful to make a new line properly:
        # - bracket related metadata must be maintained (handled by Line.append)
        # - comments need to copied over, updating the leaf IDs they're attached to
        new_line.append(new_leaf, preformatted=True)
        for comment_leaf in line.comments_after(leaf):
            new_line.append(comment_leaf, preformatted=True)

    yield new_line
+
+
def handle_is_simple_look_up_prev(line: Line, index: int, disallowed: set[int]) -> bool:
    """Decide `is_simple_lookup` for the leaves *before* the doublestar token.

    Walks backwards from `index`, collecting the chained expression, so that
    brackets/parentheses belonging to that single chained expression can be
    told apart from unrelated ones.
    """
    seen_disallowed = False
    collected: list[Leaf] = []
    position = index

    while 0 <= position < len(line.leaves):
        leaf = line.leaves[position]
        collected.append(leaf)
        seen_disallowed = seen_disallowed or leaf.type in disallowed
        if not is_expression_chained(collected):
            # The chain ended before we ran off the line; it is "simple" only
            # if no disallowed token appeared anywhere within it.
            return not seen_disallowed
        position -= 1

    return True
+
+
def handle_is_simple_lookup_forward(
    line: Line, index: int, disallowed: set[int]
) -> bool:
    """Decide `is_simple_lookup` for the leaves *after* the doublestar token.

    The forward direction needs no chained-expression tracking: scan until a
    token that clearly cannot be part of a plain (dotted) name lookup.
    """
    if index < 0:
        return True

    for leaf in line.leaves[index:]:
        if leaf.type in disallowed:
            return False
        is_name_or_dot = leaf.type in {token.NAME, token.DOT}
        is_comprehension_for = leaf.type == token.NAME and leaf.value == "for"
        if not is_name_or_dot or is_comprehension_for:
            # Anything that isn't disallowed ends the lookup expression
            # without making it complex; only the disallowed tokens are
            # semantically attached to it. Also stop early at the 'for' bit
            # of a comprehension.
            return True

    return True
+
+
def is_expression_chained(chained_leaves: list[Leaf]) -> bool:
    """Report whether the leaves collected so far (newest last, walking
    backwards through the line) still form one chained call/lookup, e.g.
    foo.lookup, foo().lookup, (foo.lookup()).
    """
    if len(chained_leaves) < 2:
        return True

    newest = chained_leaves[-1]
    previous = chained_leaves[-2]

    if previous.type == token.NAME:
        return newest.type in {token.DOT}
    if previous.type in {token.RPAR, token.RSQB}:
        return newest.type in {token.RSQB, token.RPAR}
    if previous.type in {token.LPAR, token.LSQB}:
        return newest.type in {token.NAME, token.LPAR, token.LSQB}
    return False
+
+
class StringTransformer(ABC):
    """
    An implementation of the Transformer protocol that relies on its
    subclasses overriding the template methods `do_match(...)` and
    `do_transform(...)`.

    This Transformer works exclusively on strings (for example, by merging
    or splitting them).

    The following sections can be found among the docstrings of each concrete
    StringTransformer subclass.

    Requirements:
        Which requirements must be met of the given Line for this
        StringTransformer to be applied?

    Transformations:
        If the given Line meets all of the above requirements, which string
        transformations can you expect to be applied to it by this
        StringTransformer?

    Collaborations:
        What contractual agreements does this StringTransformer have with other
        StringTransformers? Such collaborations should be eliminated/minimized
        as much as possible.
    """

    # Stable name used in place of the class's own (mypyc-compiled classes
    # would otherwise not expose a useful `__name__`).
    __name__: Final = "StringTransformer"

    # Ideally this would be a dataclass, but unfortunately mypyc breaks when used with
    # `abc.ABC`.
    def __init__(self, line_length: int, normalize_strings: bool) -> None:
        self.line_length = line_length
        self.normalize_strings = normalize_strings

    @abstractmethod
    def do_match(self, line: Line) -> TMatchResult:
        """
        Returns:
            * Ok(string_indices) such that for each index, `line.leaves[index]`
              is our target string if a match was able to be made. For
              transformers that don't result in more lines (e.g. StringMerger,
              StringParenStripper), multiple matches and transforms are done at
              once to reduce the complexity.
              OR
            * Err(CannotTransform), if no match could be made.
        """

    @abstractmethod
    def do_transform(
        self, line: Line, string_indices: list[int]
    ) -> Iterator[TResult[Line]]:
        """
        Yields:
            * Ok(new_line) where new_line is the new transformed line.
              OR
            * Err(CannotTransform) if the transformation failed for some reason. The
              `do_match(...)` template method should usually be used to reject
              the form of the given Line, but in some cases it is difficult to
              know whether or not a Line meets the StringTransformer's
              requirements until the transformation is already midway.

        Side Effects:
            This method should NOT mutate @line directly, but it MAY mutate the
            Line's underlying Node structure. (WARNING: If the underlying Node
            structure IS altered, then this method should NOT be allowed to
            yield a CannotTransform after that point.)
        """

    def __call__(
        self, line: Line, _features: Collection[Feature], _mode: Mode
    ) -> Iterator[Line]:
        """
        StringTransformer instances have a call signature that mirrors that of
        the Transformer type.

        Raises:
            CannotTransform(...) if the concrete StringTransformer class is unable
            to transform @line.
        """
        # Optimization to avoid calling `self.do_match(...)` when the line does
        # not contain any string.
        if not any(leaf.type == token.STRING for leaf in line.leaves):
            raise CannotTransform("There are no strings in this line.")

        match_result = self.do_match(line)

        if isinstance(match_result, Err):
            cant_transform = match_result.err()
            raise CannotTransform(
                f"The string transformer {self.__class__.__name__} does not recognize"
                " this line as one that it can transform."
            ) from cant_transform

        string_indices = match_result.ok()

        for line_result in self.do_transform(line, string_indices):
            if isinstance(line_result, Err):
                cant_transform = line_result.err()
                raise CannotTransform(
                    "StringTransformer failed while attempting to transform string."
                ) from cant_transform
            line = line_result.ok()
            yield line
+
+
@dataclass
class CustomSplit:
    """A custom (i.e. manual) string split.

    A single CustomSplit instance represents a single substring.

    Examples:
        Consider the following string:
        ```
        "Hi there friend."
        " This is a custom"
        f" string {split}."
        ```

        This string will correspond to the following three CustomSplit instances:
        ```
        CustomSplit(False, 16)
        CustomSplit(False, 17)
        CustomSplit(True, 16)
        ```
    """

    # True iff this substring carries its own string prefix (e.g. the `f`
    # in the example above).
    has_prefix: bool
    # Character offset of the split point within the merged string (see the
    # CustomSplit(…, 16/17/16) values in the example above).
    break_idx: int
+
+
+CustomSplitMapKey = tuple[StringID, str]
+
+
@trait
class CustomSplitMapMixin:
    """
    Mixin that records, per merged string, the sequence of CustomSplits to
    use when re-splitting it — iff none of the resulting substrings would go
    over the configured max line length.
    """

    _CUSTOM_SPLIT_MAP: ClassVar[dict[CustomSplitMapKey, tuple[CustomSplit, ...]]] = (
        defaultdict(tuple)
    )

    @staticmethod
    def _get_key(string: str) -> CustomSplitMapKey:
        """Return the unique identifier used internally to associate @string
        with a group of custom splits.
        """
        return (id(string), string)

    def add_custom_splits(
        self, string: str, custom_splits: Iterable[CustomSplit]
    ) -> None:
        """Record @custom_splits as the custom splits mapped to @string."""
        self._CUSTOM_SPLIT_MAP[self._get_key(string)] = tuple(custom_splits)

    def pop_custom_splits(self, string: str) -> list[CustomSplit]:
        """Return — and forget — the custom splits mapped to @string.

        Returns:
            The list of recorded CustomSplits, or [] if none exist. The
            mapping for @string is deleted as a side effect.
        """
        key = self._get_key(string)
        recorded = self._CUSTOM_SPLIT_MAP[key]
        del self._CUSTOM_SPLIT_MAP[key]
        return list(recorded)

    def has_custom_splits(self, string: str) -> bool:
        """True iff @string currently has custom splits recorded."""
        return self._get_key(string) in self._CUSTOM_SPLIT_MAP
+
+
+class StringMerger(StringTransformer, CustomSplitMapMixin):
+ """StringTransformer that merges strings together.
+
+ Requirements:
+ (A) The line contains adjacent strings such that ALL of the validation checks
+ listed in StringMerger._validate_msg(...)'s docstring pass.
+ OR
+ (B) The line contains a string which uses line continuation backslashes.
+
+ Transformations:
        Depending on which of the two requirements above were met, either:
+
+ (A) The string group associated with the target string is merged.
+ OR
+ (B) All line-continuation backslashes are removed from the target string.
+
+ Collaborations:
+ StringMerger provides custom split information to StringSplitter.
+ """
+
    def do_match(self, line: Line) -> TMatchResult:
        """Find the first leaf of every mergeable string group in @line.

        A leaf qualifies if it starts a run of two or more adjacent STRING
        leaves (with no inline comments inside the run and not part of an
        annotation), or if it is a STRING containing a backslash line
        continuation.
        """
        LL = line.leaves

        is_valid_index = is_valid_index_factory(LL)

        string_indices = []
        idx = 0
        while is_valid_index(idx):
            leaf = LL[idx]
            if (
                leaf.type == token.STRING
                and is_valid_index(idx + 1)
                and LL[idx + 1].type == token.STRING
            ):
                # Let's check if the string group contains an inline comment
                # If we have a comment inline, we don't merge the strings
                contains_comment = False
                i = idx
                while is_valid_index(i):
                    if LL[i].type != token.STRING:
                        break
                    if line.comments_after(LL[i]):
                        contains_comment = True
                        break
                    i += 1

                if not contains_comment and not is_part_of_annotation(leaf):
                    string_indices.append(idx)

                # Advance to the next non-STRING leaf.
                idx += 2
                while is_valid_index(idx) and LL[idx].type == token.STRING:
                    idx += 1

            elif leaf.type == token.STRING and "\\\n" in leaf.value:
                string_indices.append(idx)
                # Advance to the next non-STRING leaf.
                idx += 1
                while is_valid_index(idx) and LL[idx].type == token.STRING:
                    idx += 1

            else:
                idx += 1

        if string_indices:
            return Ok(string_indices)
        else:
            return TErr("This line has no strings that need merging.")
+
    def do_transform(
        self, line: Line, string_indices: list[int]
    ) -> Iterator[TResult[Line]]:
        """Apply both merge strategies in sequence: first strip backslash
        line continuations, then merge adjacent string groups.

        Yields Ok(new_line) if either strategy succeeded; otherwise a single
        Err whose `__cause__` chain records both failures.
        """
        new_line = line

        rblc_result = self._remove_backslash_line_continuation_chars(
            new_line, string_indices
        )
        if isinstance(rblc_result, Ok):
            new_line = rblc_result.ok()

        msg_result = self._merge_string_group(new_line, string_indices)
        if isinstance(msg_result, Ok):
            new_line = msg_result.ok()

        if isinstance(rblc_result, Err) and isinstance(msg_result, Err):
            msg_cant_transform = msg_result.err()
            rblc_cant_transform = rblc_result.err()
            cant_transform = CannotTransform(
                "StringMerger failed to merge any strings in this line."
            )

            # Chain the errors together using `__cause__`.
            msg_cant_transform.__cause__ = rblc_cant_transform
            cant_transform.__cause__ = msg_cant_transform

            yield Err(cant_transform)
        else:
            yield Ok(new_line)
+
+ @staticmethod
+ def _remove_backslash_line_continuation_chars(
+ line: Line, string_indices: list[int]
+ ) -> TResult[Line]:
+ """
+ Merge strings that were split across multiple lines using
+ line-continuation backslashes.
+
+ Returns:
+ Ok(new_line), if @line contains backslash line-continuation
+ characters.
+ OR
+ Err(CannotTransform), otherwise.
+ """
+ LL = line.leaves
+
+ indices_to_transform = []
+ for string_idx in string_indices:
+ string_leaf = LL[string_idx]
+ if (
+ string_leaf.type == token.STRING
+ and "\\\n" in string_leaf.value
+ and not has_triple_quotes(string_leaf.value)
+ ):
+ indices_to_transform.append(string_idx)
+
+ if not indices_to_transform:
+ return TErr(
+ "Found no string leaves that contain backslash line continuation"
+ " characters."
+ )
+
+ new_line = line.clone()
+ new_line.comments = line.comments.copy()
+ append_leaves(new_line, line, LL)
+
+ for string_idx in indices_to_transform:
+ new_string_leaf = new_line.leaves[string_idx]
+ new_string_leaf.value = new_string_leaf.value.replace("\\\n", "")
+
+ return Ok(new_line)
+
    def _merge_string_group(
        self, line: Line, string_indices: list[int]
    ) -> TResult[Line]:
        """
        Merges string groups (i.e. set of adjacent strings).

        Each index from `string_indices` designates one string group's first
        leaf in `line.leaves`.

        Returns:
            Ok(new_line), if ALL of the validation checks found in
            _validate_msg(...) pass.
            OR
            Err(CannotTransform), otherwise.
        """
        LL = line.leaves

        is_valid_index = is_valid_index_factory(LL)

        # A dict of {string_idx: tuple[num_of_strings, string_leaf]}.
        merged_string_idx_dict: dict[int, tuple[int, Leaf]] = {}
        for string_idx in string_indices:
            vresult = self._validate_msg(line, string_idx)
            if isinstance(vresult, Err):
                # Skip groups that fail validation; other groups on the same
                # line may still be mergeable.
                continue
            merged_string_idx_dict[string_idx] = self._merge_one_string_group(
                LL, string_idx, is_valid_index
            )

        if not merged_string_idx_dict:
            return TErr("No string group is merged")

        # Build the final line ('new_line') that this method will later return.
        new_line = line.clone()
        previous_merged_string_idx = -1
        previous_merged_num_of_strings = -1
        for i, leaf in enumerate(LL):
            if i in merged_string_idx_dict:
                previous_merged_string_idx = i
                previous_merged_num_of_strings, string_leaf = merged_string_idx_dict[i]
                new_line.append(string_leaf)

            if (
                previous_merged_string_idx
                <= i
                < previous_merged_string_idx + previous_merged_num_of_strings
            ):
                # This leaf was consumed by the merge above; keep only any
                # comments that trailed it.
                for comment_leaf in line.comments_after(leaf):
                    new_line.append(comment_leaf, preformatted=True)
                continue

            append_leaves(new_line, line, [leaf])

        return Ok(new_line)
+
    def _merge_one_string_group(
        self, LL: list[Leaf], string_idx: int, is_valid_index: Callable[[int], bool]
    ) -> tuple[int, Leaf]:
        """
        Merges one string group where the first string in the group is
        `LL[string_idx]`.

        Returns:
            A tuple of `(num_of_strings, leaf)` where `num_of_strings` is the
            number of strings merged and `leaf` is the newly merged string
            to be replaced in the new line.
        """
        # NOTE(review): this copy of the file appears to have lost a span in
        # the middle of this method — the regex literal below is truncated and
        # splices directly into the custom-split bookkeeping loop (names such
        # as `temp_string`, `prefix`, `S_leaf`, `num_of_strings` and
        # `non_string_idx` have no visible definitions here). Restore this
        # method from upstream black before editing it.

        # If the string group is wrapped inside an Atom node, we must make sure
        # to later replace that Atom with our new (merged) string leaf.
        atom_node = LL[string_idx].parent

        # We will place BREAK_MARK in between every two substrings that we
        # merge. We will then later go through our final result and use the
        # various instances of BREAK_MARK we find to add the right values to
        # the custom split map.
        BREAK_MARK = "@@@@@ BLACK BREAKPOINT MARKER @@@@@"

        QUOTE = LL[string_idx].value[-1]

        def make_naked(string: str, string_prefix: str) -> str:
            """Strip @string (i.e. make it a "naked" string)

            Pre-conditions:
                * assert_is_leaf_string(@string)

            Returns:
                A string that is identical to @string except that
                @string_prefix has been stripped, the surrounding QUOTE
                characters have been removed, and any remaining QUOTE
                characters have been escaped.
            """
            assert_is_leaf_string(string)
            if "f" in string_prefix:
                f_expressions = [
                    string[span[0] + 1 : span[1] - 1]  # +-1 to get rid of curly braces
                    for span in iter_fexpr_spans(string)
                ]
                debug_expressions_contain_visible_quotes = any(
                    re.search(r".*[\'\"].*(?= 0
                ), "Logic error while filling the custom string breakpoint cache."

                temp_string = temp_string[mark_idx + len(BREAK_MARK) :]
                breakpoint_idx = mark_idx + (len(prefix) if has_prefix else 0) + 1
                custom_splits.append(CustomSplit(has_prefix, breakpoint_idx))

        string_leaf = Leaf(token.STRING, S_leaf.value.replace(BREAK_MARK, ""))

        if atom_node is not None:
            # If not all children of the atom node are merged (this can happen
            # when there is a standalone comment in the middle) ...
            if non_string_idx - string_idx < len(atom_node.children):
                # We need to replace the old STRING leaves with the new string leaf.
                first_child_idx = LL[string_idx].remove()
                for idx in range(string_idx + 1, non_string_idx):
                    LL[idx].remove()
                if first_child_idx is not None:
                    atom_node.insert_child(first_child_idx, string_leaf)
            else:
                # Else replace the atom node with the new string leaf.
                replace_child(atom_node, string_leaf)

        self.add_custom_splits(string_leaf.value, custom_splits)
        return num_of_strings, string_leaf
+
    @staticmethod
    def _validate_msg(line: Line, string_idx: int) -> TResult[None]:
        """Validate (M)erge (S)tring (G)roup

        Transform-time string validation logic for _merge_string_group(...).

        Returns:
            * Ok(None), if ALL validation checks (listed below) pass.
              OR
            * Err(CannotTransform), if any of the following are true:
                - The target string group does not contain ANY stand-alone comments.
                - The target string is not in a string group (i.e. it has no
                  adjacent strings).
                - The string group has more than one inline comment.
                - The string group has an inline comment that appears to be a pragma.
                - The set of all string prefixes in the string group is of
                  length greater than one and is not equal to {"", "f"}.
                - The string group consists of raw strings.
                - The string group would merge f-strings with different quote types
                  and internal quotes.
                - The string group is stringified type annotations. We don't want to
                  process stringified type annotations since pyright doesn't support
                  them spanning multiple string values. (NOTE: mypy, pytype, pyre do
                  support them, so we can change if pyright also gains support in the
                  future. See https://github.com/microsoft/pyright/issues/4359.)
        """
        # We first check for "inner" stand-alone comments (i.e. stand-alone
        # comments that have a string leaf before them AND after them).
        for inc in [1, -1]:
            i = string_idx
            found_sa_comment = False
            is_valid_index = is_valid_index_factory(line.leaves)
            while is_valid_index(i) and line.leaves[i].type in [
                token.STRING,
                STANDALONE_COMMENT,
            ]:
                if line.leaves[i].type == STANDALONE_COMMENT:
                    found_sa_comment = True
                elif found_sa_comment:
                    # A string appeared after a stand-alone comment that
                    # itself followed a string => "inner" comment.
                    return TErr(
                        "StringMerger does NOT merge string groups which contain "
                        "stand-alone comments."
                    )

                i += inc

        QUOTE = line.leaves[string_idx].value[-1]

        num_of_inline_string_comments = 0
        set_of_prefixes = set()
        num_of_strings = 0
        for leaf in line.leaves[string_idx:]:
            if leaf.type != token.STRING:
                # If the string group is trailed by a comma, we count the
                # comments trailing the comma to be one of the string group's
                # comments.
                if leaf.type == token.COMMA and id(leaf) in line.comments:
                    num_of_inline_string_comments += 1
                break

            if has_triple_quotes(leaf.value):
                return TErr("StringMerger does NOT merge multiline strings.")

            num_of_strings += 1
            prefix = get_string_prefix(leaf.value).lower()
            if "r" in prefix:
                return TErr("StringMerger does NOT merge raw strings.")

            set_of_prefixes.add(prefix)

            if (
                "f" in prefix
                and leaf.value[-1] != QUOTE
                and (
                    "'" in leaf.value[len(prefix) + 1 : -1]
                    or '"' in leaf.value[len(prefix) + 1 : -1]
                )
            ):
                return TErr(
                    "StringMerger does NOT merge f-strings with different quote types"
                    " and internal quotes."
                )

            if id(leaf) in line.comments:
                num_of_inline_string_comments += 1
                if contains_pragma_comment(line.comments[id(leaf)]):
                    return TErr("Cannot merge strings which have pragma comments.")

        if num_of_strings < 2:
            return TErr(
                f"Not enough strings to merge (num_of_strings={num_of_strings})."
            )

        if num_of_inline_string_comments > 1:
            return TErr(
                f"Too many inline string comments ({num_of_inline_string_comments})."
            )

        if len(set_of_prefixes) > 1 and set_of_prefixes != {"", "f"}:
            return TErr(f"Too many different prefixes ({set_of_prefixes}).")

        return Ok(None)
+
+
class StringParenStripper(StringTransformer):
    """StringTransformer that strips surrounding parentheses from strings.

    Requirements:
        The line contains a string which is surrounded by parentheses and:
        - The target string is NOT the only argument to a function call.
        - The target string is NOT a "pointless" string.
        - The target string is NOT a dictionary value.
        - If the target string contains a PERCENT, the brackets are not
          preceded or followed by an operator with higher precedence than
          PERCENT.

    Transformations:
        The parentheses mentioned in the 'Requirements' section are stripped.

    Collaborations:
        StringParenStripper has its own inherent usefulness, but it is also
        relied on to clean up the parentheses created by StringParenWrapper (in
        the event that they are no longer needed).
    """

    def do_match(self, line: Line) -> TMatchResult:
        """Find every string in @line that is wrapped in strippable parens."""
        LL = line.leaves

        is_valid_index = is_valid_index_factory(LL)

        string_indices = []

        idx = -1
        while True:
            idx += 1
            if idx >= len(LL):
                break
            leaf = LL[idx]

            # Should be a string...
            if leaf.type != token.STRING:
                continue

            # If this is a "pointless" string...
            if (
                leaf.parent
                and leaf.parent.parent
                and leaf.parent.parent.type == syms.simple_stmt
            ):
                continue

            # Should be preceded by a non-empty LPAR...
            if (
                not is_valid_index(idx - 1)
                or LL[idx - 1].type != token.LPAR
                or is_empty_lpar(LL[idx - 1])
            ):
                continue

            # That LPAR should NOT be preceded by a colon (which could be a
            # dictionary value), function name, or a closing bracket (which
            # could be a function returning a function or a list/dictionary
            # containing a function)...
            if is_valid_index(idx - 2) and (
                LL[idx - 2].type == token.COLON
                or LL[idx - 2].type == token.NAME
                or LL[idx - 2].type in CLOSING_BRACKETS
            ):
                continue

            string_idx = idx

            # Skip the string trailer, if one exists.
            string_parser = StringParser()
            next_idx = string_parser.parse(LL, string_idx)

            # if the leaves in the parsed string include a PERCENT, we need to
            # make sure the initial LPAR is NOT preceded by an operator with
            # higher or equal precedence to PERCENT
            if is_valid_index(idx - 2):
                # mypy can't quite follow unless we name this
                before_lpar = LL[idx - 2]
                if token.PERCENT in {leaf.type for leaf in LL[idx - 1 : next_idx]} and (
                    (
                        before_lpar.type
                        in {
                            token.STAR,
                            token.AT,
                            token.SLASH,
                            token.DOUBLESLASH,
                            token.PERCENT,
                            token.TILDE,
                            token.DOUBLESTAR,
                            token.AWAIT,
                            token.LSQB,
                            token.LPAR,
                        }
                    )
                    or (
                        # only unary PLUS/MINUS
                        before_lpar.parent
                        and before_lpar.parent.type == syms.factor
                        and (before_lpar.type in {token.PLUS, token.MINUS})
                    )
                ):
                    continue

            # Should be followed by a non-empty RPAR...
            if (
                is_valid_index(next_idx)
                and LL[next_idx].type == token.RPAR
                and not is_empty_rpar(LL[next_idx])
            ):
                # That RPAR should NOT be followed by anything with higher
                # precedence than PERCENT
                if is_valid_index(next_idx + 1) and LL[next_idx + 1].type in {
                    token.DOUBLESTAR,
                    token.LSQB,
                    token.LPAR,
                    token.DOT,
                }:
                    continue

                string_indices.append(string_idx)
                idx = string_idx
                while idx < len(LL) - 1 and LL[idx + 1].type == token.STRING:
                    idx += 1

        if string_indices:
            return Ok(string_indices)
        return TErr("This line has no strings wrapped in parens.")

    def do_transform(
        self, line: Line, string_indices: list[int]
    ) -> Iterator[TResult[Line]]:
        """Strip the parens around each matched string, unless either paren
        has a comment attached."""
        LL = line.leaves

        string_and_rpar_indices: list[int] = []
        for string_idx in string_indices:
            string_parser = StringParser()
            rpar_idx = string_parser.parse(LL, string_idx)

            should_transform = True
            for leaf in (LL[string_idx - 1], LL[rpar_idx]):
                if line.comments_after(leaf):
                    # Should not strip parentheses which have comments attached
                    # to them.
                    should_transform = False
                    break
            if should_transform:
                string_and_rpar_indices.extend((string_idx, rpar_idx))

        if string_and_rpar_indices:
            yield Ok(self._transform_to_new_line(line, string_and_rpar_indices))
        else:
            yield Err(
                CannotTransform("All string groups have comments attached to them.")
            )

    def _transform_to_new_line(
        self, line: Line, string_and_rpar_indices: list[int]
    ) -> Line:
        """Build the stripped line: copy @line, dropping the LPAR before each
        string index and each RPAR index in @string_and_rpar_indices (which
        must be non-empty)."""
        LL = line.leaves

        new_line = line.clone()
        new_line.comments = line.comments.copy()

        previous_idx = -1
        # We need to sort the indices, since string_idx and its matching
        # rpar_idx may not come in order, e.g. in
        # `("outer" % ("inner".join(items)))`, the "inner" string's
        # string_idx is smaller than "outer" string's rpar_idx.
        for idx in sorted(string_and_rpar_indices):
            leaf = LL[idx]
            lpar_or_rpar_idx = idx - 1 if leaf.type == token.STRING else idx
            append_leaves(new_line, line, LL[previous_idx + 1 : lpar_or_rpar_idx])
            if leaf.type == token.STRING:
                string_leaf = Leaf(token.STRING, LL[idx].value)
                LL[lpar_or_rpar_idx].remove()  # Remove lpar.
                replace_child(LL[idx], string_leaf)
                new_line.append(string_leaf)
                # replace comments
                old_comments = new_line.comments.pop(id(LL[idx]), [])
                new_line.comments.setdefault(id(string_leaf), []).extend(old_comments)
            else:
                LL[lpar_or_rpar_idx].remove()  # This is a rpar.

            previous_idx = idx

        # Append the leaves after the last idx:
        append_leaves(new_line, line, LL[idx + 1 :])

        return new_line
+
+
+class BaseStringSplitter(StringTransformer):
+ """
+ Abstract class for StringTransformers which transform a Line's strings by splitting
+ them or placing them on their own lines where necessary to avoid going over
+ the configured line length.
+
+ Requirements:
+ * The target string value is responsible for the line going over the
+ line length limit. It follows that after all of black's other line
+ split methods have been exhausted, this line (or one of the resulting
+ lines after all line splits are performed) would still be over the
+ line_length limit unless we split this string.
+ AND
+
+ * The target string is NOT a "pointless" string (i.e. a string that has
+ no parent or siblings).
+ AND
+
+ * The target string is not followed by an inline comment that appears
+ to be a pragma.
+ AND
+
+ * The target string is not a multiline (i.e. triple-quote) string.
+ """
+
    # Operators that may appear immediately before a string operand; used by
    # _get_max_string_length to budget for "<op> STRING" spacing.
    STRING_OPERATORS: Final = [
        token.EQEQUAL,
        token.GREATER,
        token.GREATEREQUAL,
        token.LESS,
        token.LESSEQUAL,
        token.NOTEQUAL,
        token.PERCENT,
        token.PLUS,
        token.STAR,
    ]
+
    @abstractmethod
    def do_splitter_match(self, line: Line) -> TMatchResult:
        """
        BaseStringSplitter asks its clients to override this method instead of
        `StringTransformer.do_match(...)`.

        Follows the same protocol as `StringTransformer.do_match(...)`, except
        that implementations must return exactly one index (enforced by an
        assertion in `do_match`).

        Refer to `help(StringTransformer.do_match)` for more information.
        """
+
+ def do_match(self, line: Line) -> TMatchResult:
+ match_result = self.do_splitter_match(line)
+ if isinstance(match_result, Err):
+ return match_result
+
+ string_indices = match_result.ok()
+ assert len(string_indices) == 1, (
+ f"{self.__class__.__name__} should only find one match at a time, found"
+ f" {len(string_indices)}"
+ )
+ string_idx = string_indices[0]
+ vresult = self._validate(line, string_idx)
+ if isinstance(vresult, Err):
+ return vresult
+
+ return match_result
+
    def _validate(self, line: Line, string_idx: int) -> TResult[None]:
        """
        Checks that @line meets all of the requirements listed in this class's
        docstring. Refer to `help(BaseStringSplitter)` for a detailed
        description of those requirements.

        Returns:
            * Ok(None), if ALL of the requirements are met.
              OR
            * Err(CannotTransform), if ANY of the requirements are NOT met.
        """
        LL = line.leaves

        string_leaf = LL[string_idx]

        max_string_length = self._get_max_string_length(line, string_idx)
        if len(string_leaf.value) <= max_string_length:
            return TErr(
                "The string itself is not what is causing this line to be too long."
            )

        if not string_leaf.parent or [L.type for L in string_leaf.parent.children] == [
            token.STRING,
            token.NEWLINE,
        ]:
            return TErr(
                f"This string ({string_leaf.value}) appears to be pointless (i.e. has"
                " no parent)."
            )

        if id(line.leaves[string_idx]) in line.comments and contains_pragma_comment(
            line.comments[id(line.leaves[string_idx])]
        ):
            return TErr(
                "Line appears to end with an inline pragma comment. Splitting the line"
                " could modify the pragma's behavior."
            )

        if has_triple_quotes(string_leaf.value):
            return TErr("We cannot split multiline strings.")

        return Ok(None)
+
    def _get_max_string_length(self, line: Line, string_idx: int) -> int:
        """
        Calculates the max string length used when attempting to determine
        whether or not the target string is responsible for causing the line to
        go over the line length limit.

        WARNING: This method is tightly coupled to both StringSplitter and
        (especially) StringParenWrapper. There is probably a better way to
        accomplish what is being done here.

        Returns:
            max_string_length: such that `line.leaves[string_idx].value >
            max_string_length` implies that the target string IS responsible
            for causing this line to exceed the line length limit.
        """
        LL = line.leaves

        is_valid_index = is_valid_index_factory(LL)

        # We use the shorthand "WMA4" in comments to abbreviate "We must
        # account for". When giving examples, we use STRING to mean some/any
        # valid string.
        #
        # Finally, we use the following convenience variables:
        #
        #   P:  The leaf that is before the target string leaf.
        #   N:  The leaf that is after the target string leaf.
        #   NN: The leaf that is after N.

        # WMA4 the whitespace at the beginning of the line.
        offset = line.depth * 4

        if is_valid_index(string_idx - 1):
            p_idx = string_idx - 1
            if (
                LL[string_idx - 1].type == token.LPAR
                and LL[string_idx - 1].value == ""
                and string_idx >= 2
            ):
                # If the previous leaf is an empty LPAR placeholder, we should skip it.
                p_idx -= 1

            P = LL[p_idx]
            if P.type in self.STRING_OPERATORS:
                # WMA4 a space and a string operator (e.g. `+ STRING` or `== STRING`).
                offset += len(str(P)) + 1

            if P.type == token.COMMA:
                # WMA4 a space, a comma, and a closing bracket [e.g. `), STRING`].
                offset += 3

            if P.type in [token.COLON, token.EQUAL, token.PLUSEQUAL, token.NAME]:
                # This conditional branch is meant to handle dictionary keys,
                # variable assignments, 'return STRING' statement lines, and
                # 'else STRING' ternary expression lines.

                # WMA4 a single space.
                offset += 1

                # WMA4 the lengths of any leaves that came before that space,
                # but after any closing bracket before that space.
                for leaf in reversed(LL[: p_idx + 1]):
                    offset += len(str(leaf))
                    if leaf.type in CLOSING_BRACKETS:
                        break

        if is_valid_index(string_idx + 1):
            N = LL[string_idx + 1]
            if N.type == token.RPAR and N.value == "" and len(LL) > string_idx + 2:
                # If the next leaf is an empty RPAR placeholder, we should skip it.
                N = LL[string_idx + 2]

            if N.type == token.COMMA:
                # WMA4 a single comma at the end of the string (e.g `STRING,`).
                offset += 1

            if is_valid_index(string_idx + 2):
                NN = LL[string_idx + 2]

                if N.type == token.DOT and NN.type == token.NAME:
                    # This conditional branch is meant to handle method calls invoked
                    # off of a string literal up to and including the LPAR character.

                    # WMA4 the '.' character.
                    offset += 1

                    if (
                        is_valid_index(string_idx + 3)
                        and LL[string_idx + 3].type == token.LPAR
                    ):
                        # WMA4 the left parenthesis character.
                        offset += 1

                    # WMA4 the length of the method's name.
                    offset += len(NN.value)

        has_comments = False
        for comment_leaf in line.comments_after(LL[string_idx]):
            if not has_comments:
                has_comments = True
                # WMA4 two spaces before the '#' character.
                offset += 2

            # WMA4 the length of the inline comment.
            offset += len(comment_leaf.value)

        # Convert the remaining column budget back into a character count for
        # this particular line (wide characters consume two columns each).
        max_string_length = count_chars_in_width(str(line), self.line_length - offset)
        return max_string_length
+
+ @staticmethod
+ def _prefer_paren_wrap_match(LL: list[Leaf]) -> int | None:
+ """
+ Returns:
+ string_idx such that @LL[string_idx] is equal to our target (i.e.
+ matched) string, if this line matches the "prefer paren wrap" statement
+ requirements listed in the 'Requirements' section of the StringParenWrapper
+ class's docstring.
+ OR
+ None, otherwise.
+ """
+ # The line must start with a string.
+ if LL[0].type != token.STRING:
+ return None
+
+ matching_nodes = [
+ syms.listmaker,
+ syms.dictsetmaker,
+ syms.testlist_gexp,
+ ]
+ # If the string is an immediate child of a list/set/tuple literal...
+ if (
+ parent_type(LL[0]) in matching_nodes
+ or parent_type(LL[0].parent) in matching_nodes
+ ):
+ # And the string is surrounded by commas (or is the first/last child)...
+ prev_sibling = LL[0].prev_sibling
+ next_sibling = LL[0].next_sibling
+ if (
+ not prev_sibling
+ and not next_sibling
+ and parent_type(LL[0]) == syms.atom
+ ):
+ # If it's an atom string, we need to check the parent atom's siblings.
+ parent = LL[0].parent
+ assert parent is not None # For type checkers.
+ prev_sibling = parent.prev_sibling
+ next_sibling = parent.next_sibling
+ if (not prev_sibling or prev_sibling.type == token.COMMA) and (
+ not next_sibling or next_sibling.type == token.COMMA
+ ):
+ return 0
+
+ return None
+
+
def iter_fexpr_spans(s: str) -> Iterator[tuple[int, int]]:
    """
    Yields spans corresponding to expressions in a given f-string.
    Spans are half-open ranges (left inclusive, right exclusive).
    Assumes the input string is a valid f-string, but will not crash if the input
    string is invalid.
    """
    open_positions: list[int] = []  # stack of indices of unmatched '{'
    pos = 0
    length = len(s)
    while pos < length:
        char = s[pos]
        if char == "{":
            # Outside any expression, '{{' is an escaped literal brace.
            if not open_positions and pos + 1 < length and s[pos + 1] == "{":
                pos += 2
            else:
                open_positions.append(pos)
                pos += 1
        elif char == "}":
            if open_positions:
                start = open_positions.pop()
                # Closing the outermost brace ends the expression: yield it.
                if not open_positions:
                    yield (start, pos + 1)
            pos += 1
        elif open_positions and char in "'\"":
            # Inside an expression, fast-forward through string literals.
            # Backslashes are not legal in the expression portion of f-strings.
            triple = s[pos : pos + 3]
            quote = triple if triple in ("'''", '"""') else char
            pos += len(quote)
            while pos < length and s[pos : pos + len(quote)] != quote:
                pos += 1
            pos += len(quote)
        else:
            pos += 1
+
+
def fstring_contains_expr(s: str) -> bool:
    """Return True iff `s` contains at least one f-expression span."""
    for _span in iter_fexpr_spans(s):
        return True
    return False
+
+
def _toggle_fexpr_quotes(fstring: str, old_quote: str) -> str:
    """
    Toggles quotes used in f-string expressions that are `old_quote`.

    f-string expressions can't contain backslashes, so we need to toggle the
    quotes if the f-string itself will end up using the same quote. We can
    simply toggle without escaping because, quotes can't be reused in f-string
    expressions. They will fail to parse.

    NOTE: If PEP 701 is accepted, above statement will no longer be true.
    Though if quotes can be reused, we can simply reuse them without updates or
    escaping, once Black figures out how to parse the new grammar.
    """
    new_quote = "'" if old_quote == '"' else '"'
    chunks: list[str] = []
    cursor = 0
    for begin, end in iter_fexpr_spans(fstring):
        # Literal text between expressions is kept untouched; quotes are only
        # swapped inside the expression spans themselves.
        chunks.append(fstring[cursor:begin])
        chunks.append(fstring[begin:end].replace(old_quote, new_quote))
        cursor = end
    chunks.append(fstring[cursor:])
    return "".join(chunks)
+
+
class StringSplitter(BaseStringSplitter, CustomSplitMapMixin):
    """
    StringTransformer that splits "atom" strings (i.e. strings which exist on
    lines by themselves).

    Requirements:
        * The line consists ONLY of a single string (possibly prefixed by a
          string operator [e.g. '+' or '==']), MAYBE a string trailer, and MAYBE
          a trailing comma.
        AND
        * All of the requirements listed in BaseStringSplitter's docstring.

    Transformations:
        The string mentioned in the 'Requirements' section is split into as
        many substrings as necessary to adhere to the configured line length.

        In the final set of substrings, no substring should be smaller than
        MIN_SUBSTR_SIZE characters.

        The string will ONLY be split on spaces (i.e. each new substring should
        start with a space). Note that the string will NOT be split on a space
        which is escaped with a backslash.

        If the string is an f-string, it will NOT be split in the middle of an
        f-expression (e.g. in f"FooBar: {foo() if x else bar()}", {foo() if x
        else bar()} is an f-expression).

        If the string that is being split has an associated set of custom split
        records and those custom splits will NOT result in any line going over
        the configured line length, those custom splits are used. Otherwise the
        string is split as late as possible (from left-to-right) while still
        adhering to the transformation rules listed above.

    Collaborations:
        StringSplitter relies on StringMerger to construct the appropriate
        CustomSplit objects and add them to the custom split map.
    """

    # Minimum number of characters any produced substring may contain.
    MIN_SUBSTR_SIZE: Final = 6

    def do_splitter_match(self, line: Line) -> TMatchResult:
        LL = line.leaves

        if self._prefer_paren_wrap_match(LL) is not None:
            return TErr("Line needs to be wrapped in parens first.")

        is_valid_index = is_valid_index_factory(LL)

        idx = 0

        # The first two leaves MAY be the 'not in' keywords...
        if (
            is_valid_index(idx)
            and is_valid_index(idx + 1)
            and [LL[idx].type, LL[idx + 1].type] == [token.NAME, token.NAME]
            and str(LL[idx]) + str(LL[idx + 1]) == "not in"
        ):
            idx += 2
        # Else the first leaf MAY be a string operator symbol or the 'in' keyword...
        elif is_valid_index(idx) and (
            LL[idx].type in self.STRING_OPERATORS
            or LL[idx].type == token.NAME
            and str(LL[idx]) == "in"
        ):
            idx += 1

        # The next/first leaf MAY be an empty LPAR...
        if is_valid_index(idx) and is_empty_lpar(LL[idx]):
            idx += 1

        # The next/first leaf MUST be a string...
        if not is_valid_index(idx) or LL[idx].type != token.STRING:
            return TErr("Line does not start with a string.")

        string_idx = idx

        # Skip the string trailer, if one exists.
        string_parser = StringParser()
        idx = string_parser.parse(LL, string_idx)

        # That string MAY be followed by an empty RPAR...
        if is_valid_index(idx) and is_empty_rpar(LL[idx]):
            idx += 1

        # That string / empty RPAR leaf MAY be followed by a comma...
        if is_valid_index(idx) and LL[idx].type == token.COMMA:
            idx += 1

        # But no more leaves are allowed...
        if is_valid_index(idx):
            return TErr("This line does not end with a string.")

        return Ok([string_idx])

    def do_transform(
        self, line: Line, string_indices: list[int]
    ) -> Iterator[TResult[Line]]:
        LL = line.leaves
        assert len(string_indices) == 1, (
            f"{self.__class__.__name__} should only find one match at a time, found"
            f" {len(string_indices)}"
        )
        string_idx = string_indices[0]

        # The closing quote character of the target string (' or ").
        QUOTE = LL[string_idx].value[-1]

        is_valid_index = is_valid_index_factory(LL)
        insert_str_child = insert_str_child_factory(LL[string_idx])

        prefix = get_string_prefix(LL[string_idx].value).lower()

        # We MAY choose to drop the 'f' prefix from substrings that don't
        # contain any f-expressions, but ONLY if the original f-string
        # contains at least one f-expression. Otherwise, we will alter the AST
        # of the program.
        drop_pointless_f_prefix = ("f" in prefix) and fstring_contains_expr(
            LL[string_idx].value
        )

        first_string_line = True

        string_op_leaves = self._get_string_operator_leaves(LL)
        string_op_leaves_length = (
            sum(len(str(prefix_leaf)) for prefix_leaf in string_op_leaves) + 1
            if string_op_leaves
            else 0
        )

        def maybe_append_string_operators(new_line: Line) -> None:
            """
            Side Effects:
                If @line starts with a string operator and this is the first
                line we are constructing, this function appends the string
                operator to @new_line and replaces the old string operator leaf
                in the node structure. Otherwise this function does nothing.
            """
            maybe_prefix_leaves = string_op_leaves if first_string_line else []
            for i, prefix_leaf in enumerate(maybe_prefix_leaves):
                replace_child(LL[i], prefix_leaf)
                new_line.append(prefix_leaf)

        ends_with_comma = (
            is_valid_index(string_idx + 1) and LL[string_idx + 1].type == token.COMMA
        )

        def max_last_string_column() -> int:
            """
            Returns:
                The max allowed width of the string value used for the last
                line we will construct. Note that this value means the width
                rather than the number of characters (e.g., many East Asian
                characters expand to two columns).
            """
            result = self.line_length
            result -= line.depth * 4
            result -= 1 if ends_with_comma else 0
            result -= string_op_leaves_length
            return result

        # --- Calculate Max Break Width (for string value)
        # We start with the line length limit
        max_break_width = self.line_length
        # The last index of a string of length N is N-1.
        max_break_width -= 1
        # Leading whitespace is not present in the string value (e.g. Leaf.value).
        max_break_width -= line.depth * 4
        if max_break_width < 0:
            yield TErr(
                f"Unable to split {LL[string_idx].value} at such high of a line depth:"
                f" {line.depth}"
            )
            return

        # Check if StringMerger registered any custom splits.
        custom_splits = self.pop_custom_splits(LL[string_idx].value)
        # We use them ONLY if none of them would produce lines that exceed the
        # line limit.
        use_custom_breakpoints = bool(
            custom_splits
            and all(csplit.break_idx <= max_break_width for csplit in custom_splits)
        )

        # Temporary storage for the remaining chunk of the string line that
        # can't fit onto the line currently being constructed.
        rest_value = LL[string_idx].value

        def more_splits_should_be_made() -> bool:
            """
            Returns:
                True iff `rest_value` (the remaining string value from the last
                split), should be split again.
            """
            if use_custom_breakpoints:
                return len(custom_splits) > 1
            else:
                return str_width(rest_value) > max_last_string_column()

        string_line_results: list[Ok[Line]] = []
        while more_splits_should_be_made():
            if use_custom_breakpoints:
                # Custom User Split (manual)
                csplit = custom_splits.pop(0)
                break_idx = csplit.break_idx
            else:
                # Algorithmic Split (automatic)
                max_bidx = (
                    count_chars_in_width(rest_value, max_break_width)
                    - string_op_leaves_length
                )
                maybe_break_idx = self._get_break_idx(rest_value, max_bidx)
                if maybe_break_idx is None:
                    # If we are unable to algorithmically determine a good split
                    # and this string has custom splits registered to it, we
                    # fall back to using them--which means we have to start
                    # over from the beginning.
                    if custom_splits:
                        rest_value = LL[string_idx].value
                        string_line_results = []
                        first_string_line = True
                        use_custom_breakpoints = True
                        continue

                    # Otherwise, we stop splitting here.
                    break

                break_idx = maybe_break_idx

            # --- Construct `next_value`
            next_value = rest_value[:break_idx] + QUOTE

            # HACK: The following 'if' statement is a hack to fix the custom
            # breakpoint index in the case of either: (a) substrings that were
            # f-strings but will have the 'f' prefix removed OR (b) substrings
            # that were not f-strings but will now become f-strings because of
            # redundant use of the 'f' prefix (i.e. none of the substrings
            # contain f-expressions but one or more of them had the 'f' prefix
            # anyway; in which case, we will prepend 'f' to _all_ substrings).
            #
            # There is probably a better way to accomplish what is being done
            # here...
            #
            # If this substring is an f-string, we _could_ remove the 'f'
            # prefix, and the current custom split did NOT originally use a
            # prefix...
            if (
                use_custom_breakpoints
                and not csplit.has_prefix
                and (
                    # `next_value == prefix + QUOTE` happens when the custom
                    # split is an empty string.
                    next_value == prefix + QUOTE
                    or next_value != self._normalize_f_string(next_value, prefix)
                )
            ):
                # Then `csplit.break_idx` will be off by one after removing
                # the 'f' prefix.
                break_idx += 1
                next_value = rest_value[:break_idx] + QUOTE

            if drop_pointless_f_prefix:
                next_value = self._normalize_f_string(next_value, prefix)

            # --- Construct `next_leaf`
            next_leaf = Leaf(token.STRING, next_value)
            insert_str_child(next_leaf)
            self._maybe_normalize_string_quotes(next_leaf)

            # --- Construct `next_line`
            next_line = line.clone()
            maybe_append_string_operators(next_line)
            next_line.append(next_leaf)
            string_line_results.append(Ok(next_line))

            rest_value = prefix + QUOTE + rest_value[break_idx:]
            first_string_line = False

        yield from string_line_results

        if drop_pointless_f_prefix:
            rest_value = self._normalize_f_string(rest_value, prefix)

        rest_leaf = Leaf(token.STRING, rest_value)
        insert_str_child(rest_leaf)

        # NOTE: I could not find a test case that verifies that the following
        # line is actually necessary, but it seems to be. Otherwise we risk
        # not normalizing the last substring, right?
        self._maybe_normalize_string_quotes(rest_leaf)

        last_line = line.clone()
        maybe_append_string_operators(last_line)

        # If there are any leaves to the right of the target string...
        if is_valid_index(string_idx + 1):
            # We use `temp_value` here to determine how long the last line
            # would be if we were to append all the leaves to the right of the
            # target string to the last string line.
            temp_value = rest_value
            for leaf in LL[string_idx + 1 :]:
                temp_value += str(leaf)
                if leaf.type == token.LPAR:
                    break

            # Try to fit them all on the same line with the last substring...
            if (
                str_width(temp_value) <= max_last_string_column()
                or LL[string_idx + 1].type == token.COMMA
            ):
                last_line.append(rest_leaf)
                append_leaves(last_line, line, LL[string_idx + 1 :])
                yield Ok(last_line)
            # Otherwise, place the last substring on one line and everything
            # else on a line below that...
            else:
                last_line.append(rest_leaf)
                yield Ok(last_line)

                non_string_line = line.clone()
                append_leaves(non_string_line, line, LL[string_idx + 1 :])
                yield Ok(non_string_line)
        # Else the target string was the last leaf...
        else:
            last_line.append(rest_leaf)
            last_line.comments = line.comments.copy()
            yield Ok(last_line)

    def _iter_nameescape_slices(self, string: str) -> Iterator[tuple[Index, Index]]:
        r"""
        Yields:
            All ranges of @string which, if @string were to be split there,
            would result in the splitting of an \N{...} expression (which is NOT
            allowed).
        """
        # True - the previous backslash was unescaped
        # False - the previous backslash was escaped *or* there was no backslash
        previous_was_unescaped_backslash = False
        it = iter(enumerate(string))
        for idx, c in it:
            if c == "\\":
                previous_was_unescaped_backslash = not previous_was_unescaped_backslash
                continue
            if not previous_was_unescaped_backslash or c != "N":
                previous_was_unescaped_backslash = False
                continue
            previous_was_unescaped_backslash = False

            begin = idx - 1  # the position of backslash before \N{...}
            for idx, c in it:
                if c == "}":
                    end = idx
                    break
            else:
                # malformed nameescape expression?
                # should have been detected by AST parsing earlier...
                raise RuntimeError(f"{self.__class__.__name__} LOGIC ERROR!")
            yield begin, end

    def _iter_fexpr_slices(self, string: str) -> Iterator[tuple[Index, Index]]:
        """
        Yields:
            All ranges of @string which, if @string were to be split there,
            would result in the splitting of an f-expression (which is NOT
            allowed).
        """
        if "f" not in get_string_prefix(string).lower():
            return
        yield from iter_fexpr_spans(string)

    def _get_illegal_split_indices(self, string: str) -> set[Index]:
        """Return every index of @string at which a split must NOT occur
        (indices inside f-expressions or \\N{...} escape sequences)."""
        illegal_indices: set[Index] = set()
        iterators = [
            self._iter_fexpr_slices(string),
            self._iter_nameescape_slices(string),
        ]
        for it in iterators:
            for begin, end in it:
                illegal_indices.update(range(begin, end))
        return illegal_indices

    def _get_break_idx(self, string: str, max_break_idx: int) -> int | None:
        """
        This method contains the algorithm that StringSplitter uses to
        determine which character to split each string at.

        Args:
            @string: The substring that we are attempting to split.
            @max_break_idx: The ideal break index. We will return this value if it
                meets all the necessary conditions. In the likely event that it
                doesn't we will try to find the closest index BELOW @max_break_idx
                that does. If that fails, we will expand our search by also
                considering all valid indices ABOVE @max_break_idx.

        Pre-Conditions:
            * assert_is_leaf_string(@string)
            * 0 <= @max_break_idx < len(@string)

        Returns:
            break_idx, if an index is able to be found that meets all of the
            conditions listed in the 'Transformations' section of this classes'
            docstring.
                OR
            None, otherwise.
        """
        is_valid_index = is_valid_index_factory(string)

        assert is_valid_index(max_break_idx)
        assert_is_leaf_string(string)

        _illegal_split_indices = self._get_illegal_split_indices(string)

        def breaks_unsplittable_expression(i: Index) -> bool:
            """
            Returns:
                True iff returning @i would result in the splitting of an
                unsplittable expression (which is NOT allowed).
            """
            return i in _illegal_split_indices

        def passes_all_checks(i: Index) -> bool:
            """
            Returns:
                True iff ALL of the conditions listed in the 'Transformations'
                section of this classes' docstring would be met by returning @i.
            """
            is_space = string[i] == " "
            is_split_safe = is_valid_index(i - 1) and string[i - 1] in SPLIT_SAFE_CHARS

            is_not_escaped = True
            j = i - 1
            while is_valid_index(j) and string[j] == "\\":
                is_not_escaped = not is_not_escaped
                j -= 1

            is_big_enough = (
                len(string[i:]) >= self.MIN_SUBSTR_SIZE
                and len(string[:i]) >= self.MIN_SUBSTR_SIZE
            )
            return (
                (is_space or is_split_safe)
                and is_not_escaped
                and is_big_enough
                and not breaks_unsplittable_expression(i)
            )

        # First, we check all indices BELOW @max_break_idx.
        break_idx = max_break_idx
        while is_valid_index(break_idx - 1) and not passes_all_checks(break_idx):
            break_idx -= 1

        if not passes_all_checks(break_idx):
            # If that fails, we check all indices ABOVE @max_break_idx.
            #
            # If we are able to find a valid index here, the next line is going
            # to be longer than the specified line length, but it's probably
            # better than doing nothing at all.
            break_idx = max_break_idx + 1
            while is_valid_index(break_idx + 1) and not passes_all_checks(break_idx):
                break_idx += 1

            if not is_valid_index(break_idx) or not passes_all_checks(break_idx):
                return None

        return break_idx

    def _maybe_normalize_string_quotes(self, leaf: Leaf) -> None:
        # Respects the string-normalization configuration flag.
        if self.normalize_strings:
            leaf.value = normalize_string_quotes(leaf.value)

    def _normalize_f_string(self, string: str, prefix: str) -> str:
        """
        Pre-Conditions:
            * assert_is_leaf_string(@string)

        Returns:
            * If @string is an f-string that contains no f-expressions, we
              return a string identical to @string except that the 'f' prefix
              has been stripped and all double braces (i.e. '{{' or '}}') have
              been normalized (i.e. turned into '{' or '}').
                OR
            * Otherwise, we return @string.
        """
        assert_is_leaf_string(string)

        if "f" in prefix and not fstring_contains_expr(string):
            new_prefix = prefix.replace("f", "")

            temp = string[len(prefix) :]
            temp = re.sub(r"\{\{", "{", temp)
            temp = re.sub(r"\}\}", "}", temp)
            new_string = temp

            return f"{new_prefix}{new_string}"
        else:
            return string

    def _get_string_operator_leaves(self, leaves: Iterable[Leaf]) -> list[Leaf]:
        # Collects (copies of) the leading operator/keyword leaves that prefix
        # the target string on this line (e.g. `+`, `==`, `not in`).
        LL = list(leaves)

        string_op_leaves = []
        i = 0
        while LL[i].type in self.STRING_OPERATORS + [token.NAME]:
            prefix_leaf = Leaf(LL[i].type, str(LL[i]).strip())
            string_op_leaves.append(prefix_leaf)
            i += 1
        return string_op_leaves
+
+
+class StringParenWrapper(BaseStringSplitter, CustomSplitMapMixin):
+ """
+ StringTransformer that wraps strings in parens and then splits at the LPAR.
+
+ Requirements:
+ All of the requirements listed in BaseStringSplitter's docstring in
+ addition to the requirements listed below:
+
+ * The line is a return/yield statement, which returns/yields a string.
+ OR
+ * The line is part of a ternary expression (e.g. `x = y if cond else
          z`) such that the line starts with `else <string>`, where <string> is
          some string.
+ OR
+ * The line is an assert statement, which ends with a string.
+ OR
        * The line is an assignment statement (e.g. `x = <string>` or `x +=
          <string>`) such that the variable is being assigned the value of some
+ string.
+ OR
+ * The line is a dictionary key assignment where some valid key is being
+ assigned the value of some string.
+ OR
        * The line is a lambda expression and the value is a string.
+ OR
+ * The line starts with an "atom" string that prefers to be wrapped in
          parens. It's preferred to be wrapped when it is an immediate child of
+ a list/set/tuple literal, AND the string is surrounded by commas (or is
+ the first/last child).
+
+ Transformations:
+ The chosen string is wrapped in parentheses and then split at the LPAR.
+
+ We then have one line which ends with an LPAR and another line that
+ starts with the chosen string. The latter line is then split again at
+ the RPAR. This results in the RPAR (and possibly a trailing comma)
+ being placed on its own line.
+
+ NOTE: If any leaves exist to the right of the chosen string (except
+ for a trailing comma, which would be placed after the RPAR), those
+ leaves are placed inside the parentheses. In effect, the chosen
+ string is not necessarily being "wrapped" by parentheses. We can,
+ however, count on the LPAR being placed directly before the chosen
+ string.
+
+ In other words, StringParenWrapper creates "atom" strings. These
+ can then be split again by StringSplitter, if necessary.
+
+ Collaborations:
+ In the event that a string line split by StringParenWrapper is
+ changed such that it no longer needs to be given its own line,
+ StringParenWrapper relies on StringParenStripper to clean up the
+ parentheses it created.
+
        For "atom" strings that prefer to be wrapped in parens, it requires
+ StringSplitter to hold the split until the string is wrapped in parens.
+ """
+
+ def do_splitter_match(self, line: Line) -> TMatchResult:
+ LL = line.leaves
+
+ if line.leaves[-1].type in OPENING_BRACKETS:
+ return TErr(
+ "Cannot wrap parens around a line that ends in an opening bracket."
+ )
+
+ string_idx = (
+ self._return_match(LL)
+ or self._else_match(LL)
+ or self._assert_match(LL)
+ or self._assign_match(LL)
+ or self._dict_or_lambda_match(LL)
+ or self._prefer_paren_wrap_match(LL)
+ )
+
+ if string_idx is not None:
+ string_value = line.leaves[string_idx].value
+ # If the string has neither spaces nor East Asian stops...
+ if not any(
+ char == " " or char in SPLIT_SAFE_CHARS for char in string_value
+ ):
+ # And will still violate the line length limit when split...
+ max_string_width = self.line_length - ((line.depth + 1) * 4)
+ if str_width(string_value) > max_string_width:
+ # And has no associated custom splits...
+ if not self.has_custom_splits(string_value):
+ # Then we should NOT put this string on its own line.
+ return TErr(
+ "We do not wrap long strings in parentheses when the"
+ " resultant line would still be over the specified line"
+ " length and can't be split further by StringSplitter."
+ )
+ return Ok([string_idx])
+
+ return TErr("This line does not contain any non-atomic strings.")
+
+ @staticmethod
+ def _return_match(LL: list[Leaf]) -> int | None:
+ """
+ Returns:
+ string_idx such that @LL[string_idx] is equal to our target (i.e.
+ matched) string, if this line matches the return/yield statement
+ requirements listed in the 'Requirements' section of this classes'
+ docstring.
+ OR
+ None, otherwise.
+ """
+ # If this line is a part of a return/yield statement and the first leaf
+ # contains either the "return" or "yield" keywords...
+ if parent_type(LL[0]) in [syms.return_stmt, syms.yield_expr] and LL[
+ 0
+ ].value in ["return", "yield"]:
+ is_valid_index = is_valid_index_factory(LL)
+
+ idx = 2 if is_valid_index(1) and is_empty_par(LL[1]) else 1
+ # The next visible leaf MUST contain a string...
+ if is_valid_index(idx) and LL[idx].type == token.STRING:
+ return idx
+
+ return None
+
+ @staticmethod
+ def _else_match(LL: list[Leaf]) -> int | None:
+ """
+ Returns:
+ string_idx such that @LL[string_idx] is equal to our target (i.e.
+ matched) string, if this line matches the ternary expression
+ requirements listed in the 'Requirements' section of this classes'
+ docstring.
+ OR
+ None, otherwise.
+ """
+ # If this line is a part of a ternary expression and the first leaf
+ # contains the "else" keyword...
+ if (
+ parent_type(LL[0]) == syms.test
+ and LL[0].type == token.NAME
+ and LL[0].value == "else"
+ ):
+ is_valid_index = is_valid_index_factory(LL)
+
+ idx = 2 if is_valid_index(1) and is_empty_par(LL[1]) else 1
+ # The next visible leaf MUST contain a string...
+ if is_valid_index(idx) and LL[idx].type == token.STRING:
+ return idx
+
+ return None
+
+ @staticmethod
+ def _assert_match(LL: list[Leaf]) -> int | None:
+ """
+ Returns:
+ string_idx such that @LL[string_idx] is equal to our target (i.e.
+ matched) string, if this line matches the assert statement
+ requirements listed in the 'Requirements' section of this classes'
+ docstring.
+ OR
+ None, otherwise.
+ """
+ # If this line is a part of an assert statement and the first leaf
+ # contains the "assert" keyword...
+ if parent_type(LL[0]) == syms.assert_stmt and LL[0].value == "assert":
+ is_valid_index = is_valid_index_factory(LL)
+
+ for i, leaf in enumerate(LL):
+ # We MUST find a comma...
+ if leaf.type == token.COMMA:
+ idx = i + 2 if is_empty_par(LL[i + 1]) else i + 1
+
+ # That comma MUST be followed by a string...
+ if is_valid_index(idx) and LL[idx].type == token.STRING:
+ string_idx = idx
+
+ # Skip the string trailer, if one exists.
+ string_parser = StringParser()
+ idx = string_parser.parse(LL, string_idx)
+
+ # But no more leaves are allowed...
+ if not is_valid_index(idx):
+ return string_idx
+
+ return None
+
    @staticmethod
    def _assign_match(LL: list[Leaf]) -> int | None:
        """
        Returns:
            string_idx such that @LL[string_idx] is equal to our target (i.e.
            matched) string, if this line matches the assignment statement
            requirements listed in the 'Requirements' section of this classes'
            docstring.
                OR
            None, otherwise.
        """
        # If this line is a part of an expression statement or is a function
        # argument AND the first leaf contains a variable name...
        if (
            parent_type(LL[0]) in [syms.expr_stmt, syms.argument, syms.power]
            and LL[0].type == token.NAME
        ):
            is_valid_index = is_valid_index_factory(LL)

            for i, leaf in enumerate(LL):
                # We MUST find either an '=' or '+=' symbol...
                if leaf.type in [token.EQUAL, token.PLUSEQUAL]:
                    # NOTE(review): assumes '='/'+=' is never the last leaf on
                    # the line (LL[i + 1] would raise IndexError otherwise);
                    # appears to hold for lines produced upstream -- verify.
                    idx = i + 2 if is_empty_par(LL[i + 1]) else i + 1

                    # That symbol MUST be followed by a string...
                    if is_valid_index(idx) and LL[idx].type == token.STRING:
                        string_idx = idx

                        # Skip the string trailer, if one exists.
                        string_parser = StringParser()
                        idx = string_parser.parse(LL, string_idx)

                        # The next leaf MAY be a comma iff this line is a part
                        # of a function argument...
                        if (
                            parent_type(LL[0]) == syms.argument
                            and is_valid_index(idx)
                            and LL[idx].type == token.COMMA
                        ):
                            idx += 1

                        # But no more leaves are allowed...
                        if not is_valid_index(idx):
                            return string_idx

        return None
+
+ @staticmethod
+ def _dict_or_lambda_match(LL: list[Leaf]) -> int | None:
+ """
+ Returns:
+ string_idx such that @LL[string_idx] is equal to our target (i.e.
+ matched) string, if this line matches the dictionary key assignment
+ statement or lambda expression requirements listed in the
+ 'Requirements' section of this classes' docstring.
+ OR
+ None, otherwise.
+ """
+ # If this line is a part of a dictionary key assignment or lambda expression...
+ parent_types = [parent_type(LL[0]), parent_type(LL[0].parent)]
+ if syms.dictsetmaker in parent_types or syms.lambdef in parent_types:
+ is_valid_index = is_valid_index_factory(LL)
+
+ for i, leaf in enumerate(LL):
+ # We MUST find a colon, it can either be dict's or lambda's colon...
+ if leaf.type == token.COLON and i < len(LL) - 1:
+ idx = i + 2 if is_empty_par(LL[i + 1]) else i + 1
+
+ # That colon MUST be followed by a string...
+ if is_valid_index(idx) and LL[idx].type == token.STRING:
+ string_idx = idx
+
+ # Skip the string trailer, if one exists.
+ string_parser = StringParser()
+ idx = string_parser.parse(LL, string_idx)
+
+ # That string MAY be followed by a comma...
+ if is_valid_index(idx) and LL[idx].type == token.COMMA:
+ idx += 1
+
+ # But no more leaves are allowed...
+ if not is_valid_index(idx):
+ return string_idx
+
+ return None
+
    def do_transform(
        self, line: Line, string_indices: list[int]
    ) -> Iterator[TResult[Line]]:
        """
        Wrap the single target string (@LL[@string_indices[0]]) in a fresh
        pair of parentheses, yielding three lines: everything up to and
        including the new LPAR, the (possibly still too long) string itself
        one indentation level deeper, and the new RPAR plus whatever
        followed the string. Pre-existing (possibly invisible) parentheses
        around the string are replaced rather than duplicated.

        Yields:
            Ok(<line>) for each of the three resulting lines, in order.
        """
        LL = line.leaves
        assert len(string_indices) == 1, (
            f"{self.__class__.__name__} should only find one match at a time, found"
            f" {len(string_indices)}"
        )
        string_idx = string_indices[0]

        is_valid_index = is_valid_index_factory(LL)
        insert_str_child = insert_str_child_factory(LL[string_idx])

        # Does the original line end with a comma? If so, that comma is
        # re-attached after the new RPAR at the end of this method.
        comma_idx = -1
        ends_with_comma = False
        if LL[comma_idx].type == token.COMMA:
            ends_with_comma = True

        leaves_to_steal_comments_from = [LL[string_idx]]
        if ends_with_comma:
            leaves_to_steal_comments_from.append(LL[comma_idx])

        # --- First Line
        first_line = line.clone()
        left_leaves = LL[:string_idx]

        # We have to remember to account for (possibly invisible) LPAR and RPAR
        # leaves that already wrapped the target string. If these leaves do
        # exist, we will replace them with our own LPAR and RPAR leaves.
        old_parens_exist = False
        if left_leaves and left_leaves[-1].type == token.LPAR:
            old_parens_exist = True
            leaves_to_steal_comments_from.append(left_leaves[-1])
            left_leaves.pop()

        append_leaves(first_line, line, left_leaves)

        lpar_leaf = Leaf(token.LPAR, "(")
        if old_parens_exist:
            replace_child(LL[string_idx - 1], lpar_leaf)
        else:
            insert_str_child(lpar_leaf)
        first_line.append(lpar_leaf)

        # We throw inline comments that were originally to the right of the
        # target string to the top line. They will now be shown to the right of
        # the LPAR.
        for leaf in leaves_to_steal_comments_from:
            for comment_leaf in line.comments_after(leaf):
                first_line.append(comment_leaf, preformatted=True)

        yield Ok(first_line)

        # --- Middle (String) Line
        # We only need to yield one (possibly too long) string line, since the
        # `StringSplitter` will break it down further if necessary.
        string_value = LL[string_idx].value
        string_line = Line(
            mode=line.mode,
            depth=line.depth + 1,
            inside_brackets=True,
            should_split_rhs=line.should_split_rhs,
            magic_trailing_comma=line.magic_trailing_comma,
        )
        string_leaf = Leaf(token.STRING, string_value)
        insert_str_child(string_leaf)
        string_line.append(string_leaf)

        # Everything to the right of the string (minus the trailing comma
        # and any old RPAR) also goes on the middle line.
        old_rpar_leaf = None
        if is_valid_index(string_idx + 1):
            right_leaves = LL[string_idx + 1 :]
            if ends_with_comma:
                right_leaves.pop()

            if old_parens_exist:
                assert right_leaves and right_leaves[-1].type == token.RPAR, (
                    "Apparently, old parentheses do NOT exist?!"
                    f" (left_leaves={left_leaves}, right_leaves={right_leaves})"
                )
                old_rpar_leaf = right_leaves.pop()
            elif right_leaves and right_leaves[-1].type == token.RPAR:
                # Special case for lambda expressions as dict's value, e.g.:
                #     my_dict = {
                #        "key": lambda x: f"formatted: {x}",
                #     }
                # After wrapping the dict's value with parentheses, the string is
                # followed by a RPAR but its opening bracket is lambda's, not
                # the string's:
                #     "key": (lambda x: f"formatted: {x}"),
                opening_bracket = right_leaves[-1].opening_bracket
                if opening_bracket is not None and opening_bracket in left_leaves:
                    index = left_leaves.index(opening_bracket)
                    if (
                        0 < index < len(left_leaves) - 1
                        and left_leaves[index - 1].type == token.COLON
                        and left_leaves[index + 1].value == "lambda"
                    ):
                        right_leaves.pop()

            append_leaves(string_line, line, right_leaves)

        yield Ok(string_line)

        # --- Last Line
        last_line = line.clone()
        # Carry over the bracket tracker from the first line (which already
        # saw the new LPAR) so the RPAR below pairs with it.
        last_line.bracket_tracker = first_line.bracket_tracker

        new_rpar_leaf = Leaf(token.RPAR, ")")
        if old_rpar_leaf is not None:
            replace_child(old_rpar_leaf, new_rpar_leaf)
        else:
            insert_str_child(new_rpar_leaf)
        last_line.append(new_rpar_leaf)

        # If the target string ended with a comma, we place this comma to the
        # right of the RPAR on the last line.
        if ends_with_comma:
            comma_leaf = Leaf(token.COMMA, ",")
            replace_child(LL[comma_idx], comma_leaf)
            last_line.append(comma_leaf)

        yield Ok(last_line)
+
+
class StringParser:
    """
    A state machine that consumes a string's "trailer": the tokens that may
    legally follow a string literal on the same line. A trailer is either
    non-existent, an old-style formatting expression (e.g. `% varX` or
    `% (varX, varY)`), or a method-call / attribute access (e.g.
    `.format(varX, varY)`).

    NOTE: A new StringParser object MUST be instantiated for each string
    trailer we need to parse.

    Examples:
        Assume `line` is the `Line` object that corresponds to the
        following line of python code:
        ```
        x = "Some {}.".format("String") + some_other_string
        ```

        and that `string_idx` is an index such that:
        ```
        assert line.leaves[string_idx].value == "Some {}."
        ```

        Then the following snippet holds:
        ```
        string_parser = StringParser()
        idx = string_parser.parse(line.leaves, string_idx)
        assert line.leaves[idx].type == token.PLUS
        ```
    """

    # Sentinel token value used as a wildcard in the transition table below.
    DEFAULT_TOKEN: Final = 20210605

    # String Parser States
    START: Final = 1
    DOT: Final = 2
    NAME: Final = 3
    PERCENT: Final = 4
    SINGLE_FMT_ARG: Final = 5
    LPAR: Final = 6
    RPAR: Final = 7
    DONE: Final = 8

    # Transition table mapping (current state, next token) to the next state.
    _goto: Final[dict[tuple[ParserState, NodeType], ParserState]] = {
        # A string trailer may start with '.' OR '%'.
        (START, token.DOT): DOT,
        (START, token.PERCENT): PERCENT,
        (START, DEFAULT_TOKEN): DONE,
        # A '.' MUST be followed by an attribute or method name.
        (DOT, token.NAME): NAME,
        # A method name MUST be followed by an '(', whereas an attribute name
        # is the last symbol in the string trailer.
        (NAME, token.LPAR): LPAR,
        (NAME, DEFAULT_TOKEN): DONE,
        # A '%' symbol can be followed by an '(' or a single argument (e.g. a
        # string or variable name).
        (PERCENT, token.LPAR): LPAR,
        (PERCENT, DEFAULT_TOKEN): SINGLE_FMT_ARG,
        # If a '%' symbol is followed by a single argument, that argument is
        # the last leaf in the string trailer.
        (SINGLE_FMT_ARG, DEFAULT_TOKEN): DONE,
        # If present, a ')' symbol is the last symbol in a string trailer.
        # (NOTE: LPARS and nested RPARS are not included in this lookup table,
        # since they are treated as a special case by the parsing logic in this
        # classes' implementation.)
        (RPAR, DEFAULT_TOKEN): DONE,
    }

    def __init__(self) -> None:
        # Start in the initial state with no open parentheses seen yet.
        self._state = self.START
        self._unmatched_lpars = 0

    def parse(self, leaves: list[Leaf], string_idx: int) -> int:
        """
        Pre-conditions:
            * @leaves[@string_idx].type == token.STRING

        Returns:
            The index directly after the last leaf that is a part of the
            string's trailer, if a "trailer" exists.
            OR
            @string_idx + 1, if no string "trailer" exists.
        """
        assert leaves[string_idx].type == token.STRING

        # Feed leaves to the state machine until one falls outside the trailer.
        pos = string_idx + 1
        while pos < len(leaves) and self._next_state(leaves[pos]):
            pos += 1
        return pos

    def _next_state(self, leaf: Leaf) -> bool:
        """
        Pre-conditions:
            * On the first call, @leaf MUST be the leaf directly following
              the string leaf in question (e.g. if our target string is
              `line.leaves[i]`, the first call must receive
              `line.leaves[i + 1]`).
            * Each subsequent call MUST receive the leaf directly following
              the one passed to the previous call.

        Returns:
            True iff @leaf is a part of the string's trailer.
        """
        # Empty (i.e. invisible) parentheses never affect the state machine.
        if is_empty_par(leaf):
            return True

        next_token = leaf.type
        if next_token == token.LPAR:
            self._unmatched_lpars += 1

        current_state = self._state

        if current_state == self.LPAR:
            # Special case: inside parentheses we simply consume leaves
            # until the matching RPAR is found.
            if next_token == token.RPAR:
                self._unmatched_lpars -= 1
                if self._unmatched_lpars == 0:
                    self._state = self.RPAR
        elif (current_state, next_token) in self._goto:
            # An exact (state, token) transition exists in the lookup table.
            self._state = self._goto[current_state, next_token]
        elif (current_state, self.DEFAULT_TOKEN) in self._goto:
            # Fall back to the state's wildcard transition, if one exists.
            self._state = self._goto[current_state, self.DEFAULT_TOKEN]
        else:
            # No transition at all means this parser has a logic error.
            raise RuntimeError(f"{self.__class__.__name__} LOGIC ERROR!")

        return self._state != self.DONE
+
+
def insert_str_child_factory(string_leaf: Leaf) -> Callable[[LN], None]:
    """
    Factory for a convenience function that orphans @string_leaf and then
    lets the caller insert any number of new leaves into the exact spot of
    the node structure that @string_leaf originally occupied.

    Examples:
        Let `string_leaf = Leaf(token.STRING, '"foo"')` and `N =
        string_leaf.parent`, where `N` originally has the structure:

            Node(
                expr_stmt, [
                    Leaf(NAME, 'x'),
                    Leaf(EQUAL, '='),
                    Leaf(STRING, '"foo"'),
                ]
            )

        After running:
        ```
        insert_str_child = insert_str_child_factory(string_leaf)

        insert_str_child(Leaf(token.LPAR, '('))
        insert_str_child(Leaf(token.STRING, '"bar"'))
        insert_str_child(Leaf(token.RPAR, ')'))
        ```

        it follows that `string_leaf.parent is None` and `N` now has the
        structure:

            Node(
                expr_stmt, [
                    Leaf(NAME, 'x'),
                    Leaf(EQUAL, '='),
                    Leaf(LPAR, '('),
                    Leaf(STRING, '"bar"'),
                    Leaf(RPAR, ')'),
                ]
            )
    """
    # Capture the parent and slot index first; remove() detaches the leaf
    # and returns the index it occupied.
    parent = string_leaf.parent
    next_child_idx = string_leaf.remove()

    def insert_str_child(child: LN) -> None:
        # Each insertion advances the slot so successive children keep
        # their relative order.
        nonlocal next_child_idx

        assert parent is not None
        assert next_child_idx is not None

        parent.insert_child(next_child_idx, child)
        next_child_idx += 1

    return insert_str_child
+
+
def is_valid_index_factory(seq: Sequence[Any]) -> Callable[[int], bool]:
    """
    Factory for a predicate that reports whether an index lies within
    @seq's bounds. Negative indices are rejected on purpose (i.e. they are
    NOT treated as Python's wrap-around indices).

    Examples:
        ```
        my_list = [1, 2, 3]

        is_valid_index = is_valid_index_factory(my_list)

        assert is_valid_index(0)
        assert is_valid_index(2)

        assert not is_valid_index(3)
        assert not is_valid_index(-1)
        ```
    """

    def is_valid_index(idx: int) -> bool:
        """
        Returns:
            True iff @idx is non-negative AND seq[@idx] does NOT raise an
            IndexError.
        """
        return 0 <= idx < len(seq)

    return is_valid_index
diff --git a/py311/lib/python3.11/site-packages/botocore/__init__.py b/py311/lib/python3.11/site-packages/botocore/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e6fa3ffc5bb2382fec47ec155e810c960eeefdc
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/__init__.py
@@ -0,0 +1,215 @@
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import logging
+import os
+import re
+from logging import NullHandler
+
# Package version string.
__version__ = '1.42.27'


# Configure default logger to do nothing
log = logging.getLogger('botocore')
log.addHandler(NullHandler())

# Callbacks run for every newly created session; managed via
# register_initializer() / unregister_initializer() below.
_INITIALIZERS = []

# Regexes used by xform_name() to insert a separator between CamelCase
# words: first between any char and a capitalized word, then between a
# lowercase/digit char and a following capital.
_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
_end_cap_regex = re.compile('([a-z0-9])([A-Z])')
# The regex below handles the special case where some acronym
# name is pluralized, e.g GatewayARNs, ListWebACLs, SomeCNAMEs.
_special_case_transform = re.compile('[A-Z]{2,}s$')
# Prepopulate the cache with special cases that don't match
# our regular transformation.
_xform_cache = {
    ('CreateCachediSCSIVolume', '_'): 'create_cached_iscsi_volume',
    ('CreateCachediSCSIVolume', '-'): 'create-cached-iscsi-volume',
    ('DescribeCachediSCSIVolumes', '_'): 'describe_cached_iscsi_volumes',
    ('DescribeCachediSCSIVolumes', '-'): 'describe-cached-iscsi-volumes',
    ('DescribeStorediSCSIVolumes', '_'): 'describe_stored_iscsi_volumes',
    ('DescribeStorediSCSIVolumes', '-'): 'describe-stored-iscsi-volumes',
    ('CreateStorediSCSIVolume', '_'): 'create_stored_iscsi_volume',
    ('CreateStorediSCSIVolume', '-'): 'create-stored-iscsi-volume',
    ('ListHITsForQualificationType', '_'): 'list_hits_for_qualification_type',
    ('ListHITsForQualificationType', '-'): 'list-hits-for-qualification-type',
    ('ExecutePartiQLStatement', '_'): 'execute_partiql_statement',
    ('ExecutePartiQLStatement', '-'): 'execute-partiql-statement',
    ('ExecutePartiQLTransaction', '_'): 'execute_partiql_transaction',
    ('ExecutePartiQLTransaction', '-'): 'execute-partiql-transaction',
    ('ExecutePartiQLBatch', '_'): 'execute_partiql_batch',
    ('ExecutePartiQLBatch', '-'): 'execute-partiql-batch',
    (
        'AssociateWhatsAppBusinessAccount',
        '_',
    ): 'associate_whatsapp_business_account',
    (
        'AssociateWhatsAppBusinessAccount',
        '-',
    ): 'associate-whatsapp-business-account',
    ('CreateWhatsAppMessageTemplate', '_'): 'create_whatsapp_message_template',
    ('CreateWhatsAppMessageTemplate', '-'): 'create-whatsapp-message-template',
    (
        'CreateWhatsAppMessageTemplateFromLibrary',
        '_',
    ): 'create_whatsapp_message_template_from_library',
    (
        'CreateWhatsAppMessageTemplateFromLibrary',
        '-',
    ): 'create-whatsapp-message-template-from-library',
    (
        'CreateWhatsAppMessageTemplateMedia',
        '_',
    ): 'create_whatsapp_message_template_media',
    (
        'CreateWhatsAppMessageTemplateMedia',
        '-',
    ): 'create-whatsapp-message-template-media',
    ('DeleteWhatsAppMessageMedia', '_'): 'delete_whatsapp_message_media',
    ('DeleteWhatsAppMessageMedia', '-'): 'delete-whatsapp-message-media',
    ('DeleteWhatsAppMessageTemplate', '_'): 'delete_whatsapp_message_template',
    ('DeleteWhatsAppMessageTemplate', '-'): 'delete-whatsapp-message-template',
    (
        'DisassociateWhatsAppBusinessAccount',
        '_',
    ): 'disassociate_whatsapp_business_account',
    (
        'DisassociateWhatsAppBusinessAccount',
        '-',
    ): 'disassociate-whatsapp-business-account',
    (
        'GetLinkedWhatsAppBusinessAccount',
        '_',
    ): 'get_linked_whatsapp_business_account',
    (
        'GetLinkedWhatsAppBusinessAccount',
        '-',
    ): 'get-linked-whatsapp-business-account',
    (
        'GetLinkedWhatsAppBusinessAccountPhoneNumber',
        '_',
    ): 'get_linked_whatsapp_business_account_phone_number',
    (
        'GetLinkedWhatsAppBusinessAccountPhoneNumber',
        '-',
    ): 'get-linked-whatsapp-business-account-phone-number',
    ('GetWhatsAppMessageMedia', '_'): 'get_whatsapp_message_media',
    ('GetWhatsAppMessageMedia', '-'): 'get-whatsapp-message-media',
    ('GetWhatsAppMessageTemplate', '_'): 'get_whatsapp_message_template',
    ('GetWhatsAppMessageTemplate', '-'): 'get-whatsapp-message-template',
    (
        'ListLinkedWhatsAppBusinessAccounts',
        '_',
    ): 'list_linked_whatsapp_business_accounts',
    (
        'ListLinkedWhatsAppBusinessAccounts',
        '-',
    ): 'list-linked-whatsapp-business-accounts',
    ('ListWhatsAppMessageTemplates', '_'): 'list_whatsapp_message_templates',
    ('ListWhatsAppMessageTemplates', '-'): 'list-whatsapp-message-templates',
    ('ListWhatsAppTemplateLibrary', '_'): 'list_whatsapp_template_library',
    ('ListWhatsAppTemplateLibrary', '-'): 'list-whatsapp-template-library',
    ('PostWhatsAppMessageMedia', '_'): 'post_whatsapp_message_media',
    ('PostWhatsAppMessageMedia', '-'): 'post-whatsapp-message-media',
    (
        'PutWhatsAppBusinessAccountEventDestinations',
        '_',
    ): 'put_whatsapp_business_account_event_destinations',
    (
        'PutWhatsAppBusinessAccountEventDestinations',
        '-',
    ): 'put-whatsapp-business-account-event-destinations',
    ('SendWhatsAppMessage', '_'): 'send_whatsapp_message',
    ('SendWhatsAppMessage', '-'): 'send-whatsapp-message',
    ('UpdateWhatsAppMessageTemplate', '_'): 'update_whatsapp_message_template',
    ('UpdateWhatsAppMessageTemplate', '-'): 'update-whatsapp-message-template',
}
# Service-model types treated as scalar values (not referenced in this
# module itself; presumably consumed by model/serialization code — verify).
ScalarTypes = ('string', 'integer', 'boolean', 'timestamp', 'float', 'double')

# Absolute path of the installed botocore package directory.
BOTOCORE_ROOT = os.path.dirname(os.path.abspath(__file__))
+
+
# Used to specify anonymous (unsigned) request signature
class UNSIGNED:
    """Sentinel whose single instance marks a request as unsigned.

    Copying is a no-op so the sentinel keeps its identity when
    configuration objects holding it are copied.
    """

    def __copy__(self):
        # Preserve identity under copy.copy().
        return self

    def __deepcopy__(self, memo):
        # Preserve identity under copy.deepcopy().
        return self


# Replace the class with its singleton instance; identity comparisons
# against botocore.UNSIGNED are what callers rely on.
UNSIGNED = UNSIGNED()
+
+
def xform_name(name, sep='_', _xform_cache=_xform_cache):
    """Convert camel case to a "pythonic" name.

    If the name contains the ``sep`` character, then it is
    returned unchanged.

    :param name: The CamelCase name to transform.
    :param sep: Separator placed between words (default ``'_'``).
    :param _xform_cache: Internal memoization cache, pre-seeded with
        irregular names (e.g. iSCSI/WhatsApp operations) that the regular
        transformation would get wrong.
    :return: The ``sep``-delimited, lowercase form of ``name``.
    """
    if sep in name:
        # If the sep is in the name, assume that it's already
        # transformed and return the string unchanged.
        return name
    key = (name, sep)
    if key not in _xform_cache:
        # Run the special-case regex once and reuse the match (the previous
        # implementation searched twice for the same result).
        special = _special_case_transform.search(name)
        if special is not None:
            matched = special.group()
            # Replace something like ARNs, ACLs with _arns, _acls.
            name = f"{name[: -len(matched)]}{sep}{matched.lower()}"
        s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name)
        transformed = _end_cap_regex.sub(r'\1' + sep + r'\2', s1).lower()
        _xform_cache[key] = transformed
    return _xform_cache[key]
+
+
def register_initializer(callback):
    """Register an initializer function for session creation.

    The callback is invoked for every `botocore.session.Session`
    instantiated after registration.

    :type callback: callable
    :param callback: A callable that accepts a single argument
        of type `botocore.session.Session`.

    """
    _INITIALIZERS.append(callback)
+
+
def unregister_initializer(callback):
    """Unregister a previously registered initializer function.

    :type callback: callable
    :param callback: A callable that was previously registered
        with `botocore.register_initializer`.

    :raises ValueError: If a callback is provided that is not currently
        registered as an initializer.

    """
    # list.remove raises ValueError for unknown callbacks, which is the
    # documented contract here.
    _INITIALIZERS.remove(callback)
+
+
def invoke_initializers(session):
    """Run every registered initializer against ``session``.

    Initializers are invoked in registration order.

    :type session: botocore.session.Session
    :param session: The session to initialize.

    """
    for callback in _INITIALIZERS:
        callback(session)
diff --git a/py311/lib/python3.11/site-packages/botocore/__pycache__/args.cpython-311.pyc b/py311/lib/python3.11/site-packages/botocore/__pycache__/args.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d487db7b2b47ad450aca4d2cebe16eec5eb3f25b
Binary files /dev/null and b/py311/lib/python3.11/site-packages/botocore/__pycache__/args.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/botocore/__pycache__/auth.cpython-311.pyc b/py311/lib/python3.11/site-packages/botocore/__pycache__/auth.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..453e694a080e2c81a9cd5ffef615deb45020b09d
Binary files /dev/null and b/py311/lib/python3.11/site-packages/botocore/__pycache__/auth.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/botocore/__pycache__/configloader.cpython-311.pyc b/py311/lib/python3.11/site-packages/botocore/__pycache__/configloader.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8a5ca52de57e075dc36431a3e6e5e2088c3ff2cd
Binary files /dev/null and b/py311/lib/python3.11/site-packages/botocore/__pycache__/configloader.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/botocore/__pycache__/discovery.cpython-311.pyc b/py311/lib/python3.11/site-packages/botocore/__pycache__/discovery.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7b15173868aca0b5f3a38c7edef1f546ae5ba5c9
Binary files /dev/null and b/py311/lib/python3.11/site-packages/botocore/__pycache__/discovery.cpython-311.pyc differ
diff --git a/py311/lib/python3.11/site-packages/botocore/args.py b/py311/lib/python3.11/site-packages/botocore/args.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a76e879ef007aed47dc36295c8aff3db03e9a9a
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/args.py
@@ -0,0 +1,998 @@
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Internal module to help with normalizing botocore client args.
+
+This module (and all function/classes within this module) should be
+considered internal, and *not* a public API.
+
+"""
+
+import copy
+import logging
+import socket
+
+import botocore.exceptions
+import botocore.parsers
+import botocore.serialize
+from botocore.config import Config
+from botocore.endpoint import EndpointCreator
+from botocore.regions import EndpointResolverBuiltins as EPRBuiltins
+from botocore.regions import EndpointRulesetResolver
+from botocore.signers import RequestSigner
+from botocore.useragent import UserAgentString, register_feature_id
+from botocore.utils import (
+ PRIORITY_ORDERED_SUPPORTED_PROTOCOLS, # noqa: F401
+ ensure_boolean,
+ is_s3_accelerate_url,
+)
+
logger = logging.getLogger(__name__)


# Accepted values for the regional-endpoints configuration option
# (presumably consumed by the STS endpoint logic in this module — verify
# against _compute_sts_endpoint_config).
VALID_REGIONAL_ENDPOINTS_CONFIG = [
    'legacy',
    'regional',
]
# Regions that historically resolved to the global STS endpoint; used to
# decide when legacy global-endpoint behavior applies.
LEGACY_GLOBAL_STS_REGIONS = [
    'ap-northeast-1',
    'ap-south-1',
    'ap-southeast-1',
    'ap-southeast-2',
    'aws-global',
    'ca-central-1',
    'eu-central-1',
    'eu-north-1',
    'eu-west-1',
    'eu-west-2',
    'eu-west-3',
    'sa-east-1',
    'us-east-1',
    'us-east-2',
    'us-west-1',
    'us-west-2',
]
# Maximum allowed length of the ``user_agent_appid`` config field. Longer
# values result in a warning-level log message.
USERAGENT_APPID_MAXLEN = 50

# Accepted values for the request/response checksum configuration options.
VALID_REQUEST_CHECKSUM_CALCULATION_CONFIG = (
    "when_supported",
    "when_required",
)
VALID_RESPONSE_CHECKSUM_VALIDATION_CONFIG = (
    "when_supported",
    "when_required",
)


# Accepted values for the ``account_id_endpoint_mode`` config option.
VALID_ACCOUNT_ID_ENDPOINT_MODE_CONFIG = (
    'preferred',
    'disabled',
    'required',
)
+
+
+class ClientArgsCreator:
+ def __init__(
+ self,
+ event_emitter,
+ user_agent,
+ response_parser_factory,
+ loader,
+ exceptions_factory,
+ config_store,
+ user_agent_creator=None,
+ ):
+ self._event_emitter = event_emitter
+ self._response_parser_factory = response_parser_factory
+ self._loader = loader
+ self._exceptions_factory = exceptions_factory
+ self._config_store = config_store
+ if user_agent_creator is None:
+ self._session_ua_creator = UserAgentString.from_environment()
+ else:
+ self._session_ua_creator = user_agent_creator
+
    def get_client_args(
        self,
        service_model,
        region_name,
        is_secure,
        endpoint_url,
        verify,
        credentials,
        scoped_config,
        client_config,
        endpoint_bridge,
        auth_token=None,
        endpoints_ruleset_data=None,
        partition_data=None,
    ):
        """Build the keyword arguments used to construct a service client.

        Resolves the final client configuration via
        ``compute_client_args``, then assembles the request signer,
        endpoint, serializer, response parser, and endpoint-ruleset
        resolver, returning them all in one dict consumed by the client
        constructor.
        """
        final_args = self.compute_client_args(
            service_model,
            client_config,
            endpoint_bridge,
            region_name,
            endpoint_url,
            is_secure,
            scoped_config,
        )

        service_name = final_args['service_name']  # noqa
        parameter_validation = final_args['parameter_validation']
        endpoint_config = final_args['endpoint_config']
        protocol = final_args['protocol']
        config_kwargs = final_args['config_kwargs']
        s3_config = final_args['s3_config']
        partition = endpoint_config['metadata'].get('partition', None)
        socket_options = final_args['socket_options']
        configured_endpoint_url = final_args['configured_endpoint_url']
        signing_region = endpoint_config['signing_region']
        endpoint_region_name = endpoint_config['region_name']
        account_id_endpoint_mode = config_kwargs['account_id_endpoint_mode']

        # Each client gets its own copy of the emitter so per-client
        # handlers do not leak into the session-level emitter.
        event_emitter = copy.copy(self._event_emitter)
        signer = RequestSigner(
            service_model.service_id,
            signing_region,
            endpoint_config['signing_name'],
            endpoint_config['signature_version'],
            credentials,
            event_emitter,
            auth_token,
        )

        config_kwargs['s3'] = s3_config
        new_config = Config(**config_kwargs)
        endpoint_creator = EndpointCreator(event_emitter)

        endpoint = endpoint_creator.create_endpoint(
            service_model,
            region_name=endpoint_region_name,
            endpoint_url=endpoint_config['endpoint_url'],
            verify=verify,
            response_parser_factory=self._response_parser_factory,
            max_pool_connections=new_config.max_pool_connections,
            proxies=new_config.proxies,
            timeout=(new_config.connect_timeout, new_config.read_timeout),
            socket_options=socket_options,
            client_cert=new_config.client_cert,
            proxies_config=new_config.proxies_config,
        )

        # Emit event to allow service-specific or customer customization
        # of serializer kwargs (handlers may mutate serializer_kwargs).
        event_name = f'creating-serializer.{service_name}'
        serializer_kwargs = {
            'timestamp_precision': botocore.serialize.TIMESTAMP_PRECISION_DEFAULT
        }
        event_emitter.emit(
            event_name,
            protocol_name=protocol,
            service_model=service_model,
            serializer_kwargs=serializer_kwargs,
        )

        serializer = botocore.serialize.create_serializer(
            protocol,
            parameter_validation,
            timestamp_precision=serializer_kwargs['timestamp_precision'],
        )
        response_parser = botocore.parsers.create_parser(protocol)

        ruleset_resolver = self._build_endpoint_resolver(
            endpoints_ruleset_data,
            partition_data,
            client_config,
            service_model,
            endpoint_region_name,
            region_name,
            configured_endpoint_url,
            endpoint,
            is_secure,
            endpoint_bridge,
            event_emitter,
            credentials,
            account_id_endpoint_mode,
        )

        # Copy the session's user agent factory and add client configuration.
        client_ua_creator = self._session_ua_creator.with_client_config(
            new_config
        )
        supplied_ua = client_config.user_agent if client_config else None
        new_config._supplied_user_agent = supplied_ua

        return {
            'serializer': serializer,
            'endpoint': endpoint,
            'response_parser': response_parser,
            'event_emitter': event_emitter,
            'request_signer': signer,
            'service_model': service_model,
            'loader': self._loader,
            'client_config': new_config,
            'partition': partition,
            'exceptions_factory': self._exceptions_factory,
            'endpoint_ruleset_resolver': ruleset_resolver,
            'user_agent_creator': client_ua_creator,
        }
+
    def compute_client_args(
        self,
        service_model,
        client_config,
        endpoint_bridge,
        region_name,
        endpoint_url,
        is_secure,
        scoped_config,
    ):
        """Normalize all inputs into the final per-client argument dict.

        Merges the user-supplied ``client_config``, the scoped (profile)
        config, and the config store into resolved endpoint config,
        ``Config`` kwargs, s3-specific settings, and socket options.
        """
        service_name = service_model.endpoint_prefix
        protocol = service_model.resolved_protocol
        # Client config wins over scoped config for parameter validation;
        # default is enabled.
        parameter_validation = True
        if client_config and not client_config.parameter_validation:
            parameter_validation = False
        elif scoped_config:
            raw_value = scoped_config.get('parameter_validation')
            if raw_value is not None:
                parameter_validation = ensure_boolean(raw_value)

        s3_config = self.compute_s3_config(client_config)

        configured_endpoint_url = self._compute_configured_endpoint_url(
            client_config=client_config,
            endpoint_url=endpoint_url,
        )
        if configured_endpoint_url is not None:
            register_feature_id('ENDPOINT_OVERRIDE')

        endpoint_config = self._compute_endpoint_config(
            service_name=service_name,
            region_name=region_name,
            endpoint_url=configured_endpoint_url,
            is_secure=is_secure,
            endpoint_bridge=endpoint_bridge,
            s3_config=s3_config,
        )
        endpoint_variant_tags = endpoint_config['metadata'].get('tags', [])

        # Some third-party libraries expect the final user-agent string in
        # ``client.meta.config.user_agent``. To maintain backwards
        # compatibility, the preliminary user-agent string (before any Config
        # object modifications and without request-specific user-agent
        # components) is stored in the new Config object's ``user_agent``
        # property but not used by Botocore itself.
        preliminary_ua_string = self._session_ua_creator.with_client_config(
            client_config
        ).to_string()
        # Create a new client config to be passed to the client based
        # on the final values. We do not want the user to be able
        # to try to modify an existing client with a client config.
        config_kwargs = dict(
            region_name=endpoint_config['region_name'],
            signature_version=endpoint_config['signature_version'],
            user_agent=preliminary_ua_string,
        )
        if 'dualstack' in endpoint_variant_tags:
            config_kwargs.update(use_dualstack_endpoint=True)
        if 'fips' in endpoint_variant_tags:
            config_kwargs.update(use_fips_endpoint=True)
        if client_config is not None:
            config_kwargs.update(
                connect_timeout=client_config.connect_timeout,
                read_timeout=client_config.read_timeout,
                max_pool_connections=client_config.max_pool_connections,
                proxies=client_config.proxies,
                proxies_config=client_config.proxies_config,
                retries=client_config.retries,
                client_cert=client_config.client_cert,
                inject_host_prefix=client_config.inject_host_prefix,
                tcp_keepalive=client_config.tcp_keepalive,
                user_agent_extra=client_config.user_agent_extra,
                user_agent_appid=client_config.user_agent_appid,
                request_min_compression_size_bytes=(
                    client_config.request_min_compression_size_bytes
                ),
                disable_request_compression=(
                    client_config.disable_request_compression
                ),
                client_context_params=client_config.client_context_params,
                sigv4a_signing_region_set=(
                    client_config.sigv4a_signing_region_set
                ),
                request_checksum_calculation=(
                    client_config.request_checksum_calculation
                ),
                response_checksum_validation=(
                    client_config.response_checksum_validation
                ),
                account_id_endpoint_mode=client_config.account_id_endpoint_mode,
                auth_scheme_preference=client_config.auth_scheme_preference,
            )
        # Each helper below fills in / normalizes one family of kwargs,
        # mutating config_kwargs in place.
        self._compute_retry_config(config_kwargs)
        self._compute_connect_timeout(config_kwargs)
        self._compute_user_agent_appid_config(config_kwargs)
        self._compute_request_compression_config(config_kwargs)
        self._compute_sigv4a_signing_region_set_config(config_kwargs)
        self._compute_checksum_config(config_kwargs)
        self._compute_account_id_endpoint_mode_config(config_kwargs)
        self._compute_inject_host_prefix(client_config, config_kwargs)
        self._compute_auth_scheme_preference_config(
            client_config, config_kwargs
        )
        self._compute_signature_version_config(client_config, config_kwargs)
        # NOTE(review): s3_config was already computed above and its inputs
        # have not changed since, so this second call looks redundant. It
        # does yield a fresh dict (avoiding aliasing with the copy handed to
        # _compute_endpoint_config before the mutation below) — confirm
        # intent before simplifying.
        s3_config = self.compute_s3_config(client_config)

        is_s3_service = self._is_s3_service(service_name)

        if is_s3_service and 'dualstack' in endpoint_variant_tags:
            if s3_config is None:
                s3_config = {}
            s3_config['use_dualstack_endpoint'] = True

        return {
            'service_name': service_name,
            'parameter_validation': parameter_validation,
            'configured_endpoint_url': configured_endpoint_url,
            'endpoint_config': endpoint_config,
            'protocol': protocol,
            'config_kwargs': config_kwargs,
            's3_config': s3_config,
            'socket_options': self._compute_socket_options(
                scoped_config, client_config
            ),
        }
+
+ def _compute_inject_host_prefix(self, client_config, config_kwargs):
+ # In the cases that a Config object was not provided, or the private value
+ # remained UNSET, we should resolve the value from the config store.
+ if (
+ client_config is None
+ or client_config._inject_host_prefix == 'UNSET'
+ ):
+ configured_disable_host_prefix_injection = (
+ self._config_store.get_config_variable(
+ 'disable_host_prefix_injection'
+ )
+ )
+ if configured_disable_host_prefix_injection is not None:
+ config_kwargs[
+ 'inject_host_prefix'
+ ] = not configured_disable_host_prefix_injection
+ else:
+ config_kwargs['inject_host_prefix'] = True
+
+ def _compute_configured_endpoint_url(self, client_config, endpoint_url):
+ if endpoint_url is not None:
+ return endpoint_url
+
+ if self._ignore_configured_endpoint_urls(client_config):
+ logger.debug("Ignoring configured endpoint URLs.")
+ return endpoint_url
+
+ return self._config_store.get_config_variable('endpoint_url')
+
+ def _ignore_configured_endpoint_urls(self, client_config):
+ if (
+ client_config
+ and client_config.ignore_configured_endpoint_urls is not None
+ ):
+ return client_config.ignore_configured_endpoint_urls
+
+ return self._config_store.get_config_variable(
+ 'ignore_configured_endpoint_urls'
+ )
+
+ def compute_s3_config(self, client_config):
+ s3_configuration = self._config_store.get_config_variable('s3')
+
+ # Next specific client config values takes precedence over
+ # specific values in the scoped config.
+ if client_config is not None:
+ if client_config.s3 is not None:
+ if s3_configuration is None:
+ s3_configuration = client_config.s3
+ else:
+ # The current s3_configuration dictionary may be
+ # from a source that only should be read from so
+ # we want to be safe and just make a copy of it to modify
+ # before it actually gets updated.
+ s3_configuration = s3_configuration.copy()
+ s3_configuration.update(client_config.s3)
+
+ return s3_configuration
+
+ def _is_s3_service(self, service_name):
+ """Whether the service is S3 or S3 Control.
+
+ Note that throughout this class, service_name refers to the endpoint
+ prefix, not the folder name of the service in botocore/data. For
+ S3 Control, the folder name is 's3control' but the endpoint prefix is
+ 's3-control'.
+ """
+ return service_name in ['s3', 's3-control']
+
+ def _compute_endpoint_config(
+ self,
+ service_name,
+ region_name,
+ endpoint_url,
+ is_secure,
+ endpoint_bridge,
+ s3_config,
+ ):
+ resolve_endpoint_kwargs = {
+ 'service_name': service_name,
+ 'region_name': region_name,
+ 'endpoint_url': endpoint_url,
+ 'is_secure': is_secure,
+ 'endpoint_bridge': endpoint_bridge,
+ }
+ if service_name == 's3':
+ return self._compute_s3_endpoint_config(
+ s3_config=s3_config, **resolve_endpoint_kwargs
+ )
+ if service_name == 'sts':
+ return self._compute_sts_endpoint_config(**resolve_endpoint_kwargs)
+ return self._resolve_endpoint(**resolve_endpoint_kwargs)
+
    def _compute_s3_endpoint_config(
        self, s3_config, **resolve_endpoint_kwargs
    ):
        # Resolve the s3 endpoint, optionally forcing the legacy global
        # endpoint (region dropped before resolution) and backfilling the
        # region for custom endpoint URLs.
        force_s3_global = self._should_force_s3_global(
            resolve_endpoint_kwargs['region_name'], s3_config
        )
        if force_s3_global:
            # Resolving with region_name=None yields the global endpoint.
            resolve_endpoint_kwargs['region_name'] = None
        endpoint_config = self._resolve_endpoint(**resolve_endpoint_kwargs)
        self._set_region_if_custom_s3_endpoint(
            endpoint_config, resolve_endpoint_kwargs['endpoint_bridge']
        )
        # For backwards compatibility reasons, we want to make sure the
        # client.meta.region_name will remain us-east-1 if we forced the
        # endpoint to be the global region. Specifically, if this value
        # changes to aws-global, it breaks logic where a user is checking
        # for us-east-1 as the global endpoint such as in creating buckets.
        if force_s3_global and endpoint_config['region_name'] == 'aws-global':
            endpoint_config['region_name'] = 'us-east-1'
        return endpoint_config
+
+ def _should_force_s3_global(self, region_name, s3_config):
+ s3_regional_config = 'legacy'
+ if s3_config and 'us_east_1_regional_endpoint' in s3_config:
+ s3_regional_config = s3_config['us_east_1_regional_endpoint']
+ self._validate_s3_regional_config(s3_regional_config)
+
+ is_global_region = region_name in ('us-east-1', None)
+ return s3_regional_config == 'legacy' and is_global_region
+
    def _validate_s3_regional_config(self, config_val):
        # Reject anything outside the allowed regional-endpoint values
        # (e.g. 'legacy'/'regional'); raised before the value is used.
        if config_val not in VALID_REGIONAL_ENDPOINTS_CONFIG:
            raise botocore.exceptions.InvalidS3UsEast1RegionalEndpointConfigError(
                s3_us_east_1_regional_endpoint_config=config_val
            )
+
+ def _set_region_if_custom_s3_endpoint(
+ self, endpoint_config, endpoint_bridge
+ ):
+ # If a user is providing a custom URL, the endpoint resolver will
+ # refuse to infer a signing region. If we want to default to s3v4,
+ # we have to account for this.
+ if (
+ endpoint_config['signing_region'] is None
+ and endpoint_config['region_name'] is None
+ ):
+ endpoint = endpoint_bridge.resolve('s3')
+ endpoint_config['signing_region'] = endpoint['signing_region']
+ endpoint_config['region_name'] = endpoint['region_name']
+
+ def _compute_sts_endpoint_config(self, **resolve_endpoint_kwargs):
+ endpoint_config = self._resolve_endpoint(**resolve_endpoint_kwargs)
+ if self._should_set_global_sts_endpoint(
+ resolve_endpoint_kwargs['region_name'],
+ resolve_endpoint_kwargs['endpoint_url'],
+ endpoint_config,
+ ):
+ self._set_global_sts_endpoint(
+ endpoint_config, resolve_endpoint_kwargs['is_secure']
+ )
+ return endpoint_config
+
+ def _should_set_global_sts_endpoint(
+ self, region_name, endpoint_url, endpoint_config
+ ):
+ has_variant_tags = endpoint_config and endpoint_config.get(
+ 'metadata', {}
+ ).get('tags')
+ if endpoint_url or has_variant_tags:
+ return False
+ return (
+ self._get_sts_regional_endpoints_config() == 'legacy'
+ and region_name in LEGACY_GLOBAL_STS_REGIONS
+ )
+
+ def _get_sts_regional_endpoints_config(self):
+ sts_regional_endpoints_config = self._config_store.get_config_variable(
+ 'sts_regional_endpoints'
+ )
+ if not sts_regional_endpoints_config:
+ sts_regional_endpoints_config = 'regional'
+ if (
+ sts_regional_endpoints_config
+ not in VALID_REGIONAL_ENDPOINTS_CONFIG
+ ):
+ raise botocore.exceptions.InvalidSTSRegionalEndpointsConfigError(
+ sts_regional_endpoints_config=sts_regional_endpoints_config
+ )
+ return sts_regional_endpoints_config
+
+ def _set_global_sts_endpoint(self, endpoint_config, is_secure):
+ scheme = 'https' if is_secure else 'http'
+ endpoint_config['endpoint_url'] = f'{scheme}://sts.amazonaws.com'
+ endpoint_config['signing_region'] = 'us-east-1'
+
+ def _resolve_endpoint(
+ self,
+ service_name,
+ region_name,
+ endpoint_url,
+ is_secure,
+ endpoint_bridge,
+ ):
+ return endpoint_bridge.resolve(
+ service_name, region_name, endpoint_url, is_secure
+ )
+
+ def _compute_socket_options(self, scoped_config, client_config=None):
+ # This disables Nagle's algorithm and is the default socket options
+ # in urllib3.
+ socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+ client_keepalive = client_config and client_config.tcp_keepalive
+ scoped_keepalive = scoped_config and self._ensure_boolean(
+ scoped_config.get("tcp_keepalive", False)
+ )
+ # Enables TCP Keepalive if specified in client config object or shared config file.
+ if client_keepalive or scoped_keepalive:
+ socket_options.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1))
+ return socket_options
+
    def _compute_retry_config(self, config_kwargs):
        # Normalize both retry settings: the attempt count first, then the
        # retry mode (each handles its own precedence rules).
        self._compute_retry_max_attempts(config_kwargs)
        self._compute_retry_mode(config_kwargs)
+
+ def _compute_retry_max_attempts(self, config_kwargs):
+ # There's a pre-existing max_attempts client config value that actually
+ # means max *retry* attempts. There's also a `max_attempts` we pull
+ # from the config store that means *total attempts*, which includes the
+ # intitial request. We can't change what `max_attempts` means in
+ # client config so we try to normalize everything to a new
+ # "total_max_attempts" variable. We ensure that after this, the only
+ # configuration for "max attempts" is the 'total_max_attempts' key.
+ # An explicitly provided max_attempts in the client config
+ # overrides everything.
+ retries = config_kwargs.get('retries')
+ if retries is not None:
+ if 'total_max_attempts' in retries:
+ retries.pop('max_attempts', None)
+ return
+ if 'max_attempts' in retries:
+ value = retries.pop('max_attempts')
+ # client config max_attempts means total retries so we
+ # have to add one for 'total_max_attempts' to account
+ # for the initial request.
+ retries['total_max_attempts'] = value + 1
+ return
+ # Otherwise we'll check the config store which checks env vars,
+ # config files, etc. There is no default value for max_attempts
+ # so if this returns None and we don't set a default value here.
+ max_attempts = self._config_store.get_config_variable('max_attempts')
+ if max_attempts is not None:
+ if retries is None:
+ retries = {}
+ config_kwargs['retries'] = retries
+ retries['total_max_attempts'] = max_attempts
+
+ def _compute_retry_mode(self, config_kwargs):
+ retries = config_kwargs.get('retries')
+ if retries is None:
+ retries = {}
+ config_kwargs['retries'] = retries
+ elif 'mode' in retries:
+ # If there's a retry mode explicitly set in the client config
+ # that overrides everything.
+ return
+ retry_mode = self._config_store.get_config_variable('retry_mode')
+ if retry_mode is None:
+ retry_mode = 'legacy'
+ retries['mode'] = retry_mode
+
+ def _compute_connect_timeout(self, config_kwargs):
+ # Checking if connect_timeout is set on the client config.
+ # If it is not, we check the config_store in case a
+ # non legacy default mode has been configured.
+ connect_timeout = config_kwargs.get('connect_timeout')
+ if connect_timeout is not None:
+ return
+ connect_timeout = self._config_store.get_config_variable(
+ 'connect_timeout'
+ )
+ if connect_timeout:
+ config_kwargs['connect_timeout'] = connect_timeout
+
    def _compute_request_compression_config(self, config_kwargs):
        # Resolve the two request-compression settings, preferring client
        # config values over the config store, validating/coercing each.
        min_size = config_kwargs.get('request_min_compression_size_bytes')
        disabled = config_kwargs.get('disable_request_compression')
        if min_size is None:
            min_size = self._config_store.get_config_variable(
                'request_min_compression_size_bytes'
            )
        # conversion func is skipped so input validation must be done here
        # regardless if the value is coming from the config store or the
        # config object
        min_size = self._validate_min_compression_size(min_size)
        config_kwargs['request_min_compression_size_bytes'] = min_size

        if disabled is None:
            disabled = self._config_store.get_config_variable(
                'disable_request_compression'
            )
        else:
            # if the user provided a value we must check if it's a boolean
            disabled = ensure_boolean(disabled)
        config_kwargs['disable_request_compression'] = disabled
+
+ def _validate_min_compression_size(self, min_size):
+ min_allowed_min_size = 1
+ max_allowed_min_size = 1048576
+ error_msg_base = (
+ f'Invalid value "{min_size}" for '
+ 'request_min_compression_size_bytes.'
+ )
+ try:
+ min_size = int(min_size)
+ except (ValueError, TypeError):
+ msg = (
+ f'{error_msg_base} Value must be an integer. '
+ f'Received {type(min_size)} instead.'
+ )
+ raise botocore.exceptions.InvalidConfigError(error_msg=msg)
+ if not min_allowed_min_size <= min_size <= max_allowed_min_size:
+ msg = (
+ f'{error_msg_base} Value must be between '
+ f'{min_allowed_min_size} and {max_allowed_min_size}.'
+ )
+ raise botocore.exceptions.InvalidConfigError(error_msg=msg)
+
+ return min_size
+
+ def _ensure_boolean(self, val):
+ if isinstance(val, bool):
+ return val
+ else:
+ return val.lower() == 'true'
+
    def _build_endpoint_resolver(
        self,
        endpoints_ruleset_data,
        partition_data,
        client_config,
        service_model,
        endpoint_region_name,
        region_name,
        endpoint_url,
        endpoint,
        is_secure,
        endpoint_bridge,
        event_emitter,
        credentials,
        account_id_endpoint_mode,
    ):
        # Build the rules-based (v2) endpoint resolver for this client, or
        # None when no ruleset data exists for the service.
        if endpoints_ruleset_data is None:
            return None

        # The legacy EndpointResolver is global to the session, but
        # EndpointRulesetResolver is service-specific. Builtins for
        # EndpointRulesetResolver must not be derived from the legacy
        # endpoint resolver's output, including final_args, s3_config,
        # etc.
        s3_config_raw = self.compute_s3_config(client_config) or {}
        service_name_raw = service_model.endpoint_prefix
        # Maintain complex logic for s3 and sts endpoints for backwards
        # compatibility.
        if service_name_raw in ['s3', 'sts'] or region_name is None:
            eprv2_region_name = endpoint_region_name
        else:
            eprv2_region_name = region_name
        resolver_builtins = self.compute_endpoint_resolver_builtin_defaults(
            region_name=eprv2_region_name,
            service_name=service_name_raw,
            s3_config=s3_config_raw,
            endpoint_bridge=endpoint_bridge,
            client_endpoint_url=endpoint_url,
            legacy_endpoint_url=endpoint.host,
            credentials=credentials,
            account_id_endpoint_mode=account_id_endpoint_mode,
        )
        # Client context params for s3 conflict with the available settings
        # in the `s3` parameter on the `Config` object. If the same parameter
        # is set in both places, the value in the `s3` parameter takes priority.
        if client_config is not None:
            client_context = client_config.client_context_params or {}
        else:
            client_context = {}
        if self._is_s3_service(service_name_raw):
            client_context.update(s3_config_raw)

        # A string signature_version is forwarded so the ruleset resolver
        # can honor an explicitly requested auth scheme.
        sig_version = (
            client_config.signature_version
            if client_config is not None
            else None
        )
        return EndpointRulesetResolver(
            endpoint_ruleset_data=endpoints_ruleset_data,
            partition_data=partition_data,
            service_model=service_model,
            builtins=resolver_builtins,
            client_context=client_context,
            event_emitter=event_emitter,
            use_ssl=is_secure,
            requested_auth_scheme=sig_version,
        )
+
    def compute_endpoint_resolver_builtin_defaults(
        self,
        region_name,
        service_name,
        s3_config,
        endpoint_bridge,
        client_endpoint_url,
        legacy_endpoint_url,
        credentials,
        account_id_endpoint_mode,
    ):
        # Build the builtin-parameter table consumed by the rules-based
        # endpoint resolver (EndpointRulesetResolver).
        #
        # EndpointRulesetResolver rulesets may accept an "SDK::Endpoint" as
        # input. If the endpoint_url argument of create_client() is set, it
        # always takes priority.
        if client_endpoint_url:
            given_endpoint = client_endpoint_url
        # If an endpoints.json data file other than the one bundled within
        # the botocore/data directory is used, the output of legacy
        # endpoint resolution is provided to EndpointRulesetResolver.
        elif not endpoint_bridge.resolver_uses_builtin_data():
            given_endpoint = legacy_endpoint_url
        else:
            given_endpoint = None

        # The endpoint rulesets differ from legacy botocore behavior in whether
        # forcing path style addressing in incompatible situations raises an
        # exception or silently ignores the config setting. The
        # AWS_S3_FORCE_PATH_STYLE parameter is adjusted both here and for each
        # operation so that the ruleset behavior is backwards compatible.
        if s3_config.get('use_accelerate_endpoint', False):
            # Accelerate endpoints are incompatible with path-style.
            force_path_style = False
        elif client_endpoint_url is not None and not is_s3_accelerate_url(
            client_endpoint_url
        ):
            force_path_style = s3_config.get('addressing_style') != 'virtual'
        else:
            force_path_style = s3_config.get('addressing_style') == 'path'

        return {
            EPRBuiltins.AWS_REGION: region_name,
            EPRBuiltins.AWS_USE_FIPS: (
                # SDK_ENDPOINT cannot be combined with AWS_USE_FIPS
                given_endpoint is None
                # use legacy resolver's _resolve_endpoint_variant_config_var()
                # or default to False if it returns None
                and endpoint_bridge._resolve_endpoint_variant_config_var(
                    'use_fips_endpoint'
                )
                or False
            ),
            EPRBuiltins.AWS_USE_DUALSTACK: (
                # SDK_ENDPOINT cannot be combined with AWS_USE_DUALSTACK
                given_endpoint is None
                # use legacy resolver's _resolve_use_dualstack_endpoint() and
                # or default to False if it returns None
                and endpoint_bridge._resolve_use_dualstack_endpoint(
                    service_name
                )
                or False
            ),
            EPRBuiltins.AWS_STS_USE_GLOBAL_ENDPOINT: (
                self._should_set_global_sts_endpoint(
                    region_name=region_name,
                    endpoint_url=None,
                    endpoint_config=None,
                )
            ),
            EPRBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT: (
                self._should_force_s3_global(region_name, s3_config)
            ),
            EPRBuiltins.AWS_S3_ACCELERATE: s3_config.get(
                'use_accelerate_endpoint', False
            ),
            EPRBuiltins.AWS_S3_FORCE_PATH_STYLE: force_path_style,
            # Note the different use_arn_region defaults: True for S3,
            # False for S3 Control.
            EPRBuiltins.AWS_S3_USE_ARN_REGION: s3_config.get(
                'use_arn_region', True
            ),
            EPRBuiltins.AWS_S3CONTROL_USE_ARN_REGION: s3_config.get(
                'use_arn_region', False
            ),
            EPRBuiltins.AWS_S3_DISABLE_MRAP: s3_config.get(
                's3_disable_multiregion_access_points', False
            ),
            EPRBuiltins.SDK_ENDPOINT: given_endpoint,
            # Account ID is resolved lazily (deferred property) so that no
            # credential fetch happens unless a ruleset needs it.
            EPRBuiltins.ACCOUNT_ID: credentials.get_deferred_property(
                'account_id'
            )
            if credentials
            else None,
            EPRBuiltins.ACCOUNT_ID_ENDPOINT_MODE: account_id_endpoint_mode,
        }
+
+ def _compute_user_agent_appid_config(self, config_kwargs):
+ user_agent_appid = config_kwargs.get('user_agent_appid')
+ if user_agent_appid is None:
+ user_agent_appid = self._config_store.get_config_variable(
+ 'user_agent_appid'
+ )
+ if (
+ user_agent_appid is not None
+ and len(user_agent_appid) > USERAGENT_APPID_MAXLEN
+ ):
+ logger.warning(
+ 'The configured value for user_agent_appid exceeds the '
+ 'maximum length of %d characters.',
+ USERAGENT_APPID_MAXLEN,
+ )
+ config_kwargs['user_agent_appid'] = user_agent_appid
+
+ def _compute_sigv4a_signing_region_set_config(self, config_kwargs):
+ sigv4a_signing_region_set = config_kwargs.get(
+ 'sigv4a_signing_region_set'
+ )
+ if sigv4a_signing_region_set is None:
+ sigv4a_signing_region_set = self._config_store.get_config_variable(
+ 'sigv4a_signing_region_set'
+ )
+ config_kwargs['sigv4a_signing_region_set'] = sigv4a_signing_region_set
+
+ def _compute_checksum_config(self, config_kwargs):
+ self._handle_checksum_config(
+ config_kwargs,
+ config_key="request_checksum_calculation",
+ valid_options=VALID_REQUEST_CHECKSUM_CALCULATION_CONFIG,
+ )
+ self._handle_checksum_config(
+ config_kwargs,
+ config_key="response_checksum_validation",
+ valid_options=VALID_RESPONSE_CHECKSUM_VALIDATION_CONFIG,
+ )
+
+ def _handle_checksum_config(
+ self,
+ config_kwargs,
+ config_key,
+ valid_options,
+ ):
+ value = config_kwargs.get(config_key)
+ if value is None:
+ value = self._config_store.get_config_variable(config_key)
+
+ if isinstance(value, str):
+ value = value.lower()
+
+ if value not in valid_options:
+ raise botocore.exceptions.InvalidChecksumConfigError(
+ config_key=config_key,
+ config_value=value,
+ valid_options=valid_options,
+ )
+ self._register_checksum_config_feature_ids(value, config_key)
+ config_kwargs[config_key] = value
+
+ def _register_checksum_config_feature_ids(self, value, config_key):
+ checksum_config_feature_id = None
+ if config_key == "request_checksum_calculation":
+ checksum_config_feature_id = (
+ f"FLEXIBLE_CHECKSUMS_REQ_{value.upper()}"
+ )
+ elif config_key == "response_checksum_validation":
+ checksum_config_feature_id = (
+ f"FLEXIBLE_CHECKSUMS_RES_{value.upper()}"
+ )
+ if checksum_config_feature_id is not None:
+ register_feature_id(checksum_config_feature_id)
+
    def _compute_account_id_endpoint_mode_config(self, config_kwargs):
        # Resolve and validate 'account_id_endpoint_mode' (client config
        # first, then the config store).
        config_key = 'account_id_endpoint_mode'

        # Disable account id based endpoint routing for unsigned requests
        # since there are no credentials to resolve.
        signature_version = config_kwargs.get('signature_version')
        # Identity check: botocore.UNSIGNED is a sentinel object.
        if signature_version is botocore.UNSIGNED:
            config_kwargs[config_key] = 'disabled'
            return

        account_id_endpoint_mode = config_kwargs.get(config_key)
        if account_id_endpoint_mode is None:
            account_id_endpoint_mode = self._config_store.get_config_variable(
                config_key
            )

        # Normalize case before validating against the allowed values.
        if isinstance(account_id_endpoint_mode, str):
            account_id_endpoint_mode = account_id_endpoint_mode.lower()

        if (
            account_id_endpoint_mode
            not in VALID_ACCOUNT_ID_ENDPOINT_MODE_CONFIG
        ):
            raise botocore.exceptions.InvalidConfigError(
                error_msg=f"The configured value '{account_id_endpoint_mode}' for '{config_key}' is "
                f"invalid. Valid values are: {VALID_ACCOUNT_ID_ENDPOINT_MODE_CONFIG}."
            )

        config_kwargs[config_key] = account_id_endpoint_mode
+
    def _compute_auth_scheme_preference_config(
        self, client_config, config_kwargs
    ):
        # Resolve the comma-delimited auth-scheme preference list; values
        # from an in-code Config object are tagged (ClientConfigString) so
        # later resolution can tell their origin.
        config_key = 'auth_scheme_preference'
        set_in_config_object = False

        if client_config and client_config.auth_scheme_preference:
            value = client_config.auth_scheme_preference
            set_in_config_object = True
        else:
            value = self._config_store.get_config_variable(config_key)

        if value is None:
            config_kwargs[config_key] = None
            return

        if not isinstance(value, str):
            raise botocore.exceptions.InvalidConfigError(
                error_msg=(
                    f"{config_key} must be a comma-delimited string. "
                    f"Received {type(value)} instead: {value}."
                )
            )

        # Normalize: drop empty items and strip ALL spaces/tabs from each
        # item (interior whitespace included), then re-join.
        value = ','.join(
            item.replace(' ', '').replace('\t', '')
            for item in value.split(',')
            if item.strip()
        )

        if set_in_config_object:
            value = ClientConfigString(value)

        config_kwargs[config_key] = value
+
+ def _compute_signature_version_config(self, client_config, config_kwargs):
+ if client_config and client_config.signature_version:
+ value = client_config.signature_version
+ if isinstance(value, str):
+ config_kwargs['signature_version'] = ClientConfigString(value)
+
+
class ConfigObjectWrapper:
    """Marker base class for values that were set via an in-code Config
    object (as opposed to env vars or config files)."""
+
+
class ClientConfigString(str, ConfigObjectWrapper):
    # str subclass marking a value as having come from an in-code Config
    # object; behaves exactly like the wrapped string otherwise.
    def __new__(cls, value=None):
        # NOTE(review): the default of None stringifies to 'None', not '' —
        # presumably callers always pass a value; confirm before relying on
        # the no-argument form.
        return super().__new__(cls, value)
diff --git a/py311/lib/python3.11/site-packages/botocore/auth.py b/py311/lib/python3.11/site-packages/botocore/auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..7754f210074f7da4de46bb414a0f31df1b7b1117
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/auth.py
@@ -0,0 +1,1227 @@
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import base64
+import calendar
+import datetime
+import functools
+import hmac
+import json
+import logging
+import time
+from collections.abc import Mapping
+from email.utils import formatdate
+from hashlib import sha1, sha256
+from operator import itemgetter
+
+from botocore.compat import (
+ HAS_CRT,
+ MD5_AVAILABLE, # noqa: F401
+ HTTPHeaders,
+ encodebytes,
+ ensure_unicode,
+ get_current_datetime,
+ parse_qs,
+ quote,
+ unquote,
+ urlsplit,
+ urlunsplit,
+)
+from botocore.exceptions import (
+ NoAuthTokenError,
+ NoCredentialsError,
+ UnknownSignatureVersionError,
+ UnsupportedSignatureVersionError,
+)
+from botocore.utils import (
+ is_valid_ipv6_endpoint_url,
+ normalize_url_path,
+ percent_encode_sequence,
+)
+
+logger = logging.getLogger(__name__)
+
+
# SHA-256 hex digest of an empty byte string; used as the payload hash
# for bodiless requests.
EMPTY_SHA256_HASH = (
    'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
)
# This is the buffer size used when calculating sha256 checksums.
# Experimenting with various buffer sizes showed that this value generally
# gave the best result (in terms of performance).
PAYLOAD_BUFFER = 1024 * 1024
# Timestamp formats: full ISO8601 and the compact SigV4 form.
ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
SIGV4_TIMESTAMP = '%Y%m%dT%H%M%SZ'
# Header names (lowercase) excluded from signing.
SIGNED_HEADERS_BLACKLIST = [
    'expect',
    'transfer-encoding',
    'user-agent',
    'x-amzn-trace-id',
]
# Sentinels used in place of a payload hash when the body is not signed.
UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD'
STREAMING_UNSIGNED_PAYLOAD_TRAILER = 'STREAMING-UNSIGNED-PAYLOAD-TRAILER'
+
+
def _host_from_url(url):
    """Derive the Host header value for *url*.

    The result is lowercase, excludes userinfo, brackets IPv6 literals,
    and omits the port when it is the scheme's default.
    """
    parts = urlsplit(url)
    host = parts.hostname  # urlsplit's hostname is always lowercase
    if is_valid_ipv6_endpoint_url(url):
        host = f'[{host}]'
    port = parts.port
    default_port = {'http': 80, 'https': 443}.get(parts.scheme)
    if port is not None and port != default_port:
        host = f'{host}:{port}'
    return host
+
+
+def _get_body_as_dict(request):
+ # For query services, request.data is form-encoded and is already a
+ # dict, but for other services such as rest-json it could be a json
+ # string or bytes. In those cases we attempt to load the data as a
+ # dict.
+ data = request.data
+ if isinstance(data, bytes):
+ data = json.loads(data.decode('utf-8'))
+ elif isinstance(data, str):
+ data = json.loads(data)
+ return data
+
+
class BaseSigner:
    # Abstract base for all request signers. The flags advertise what
    # inputs a signer needs so callers can supply region/token up front.
    REQUIRES_REGION = False
    REQUIRES_TOKEN = False

    def add_auth(self, request):
        # Subclasses mutate *request* in place to attach authentication.
        raise NotImplementedError("add_auth")
+
+
class TokenSigner(BaseSigner):
    """Signers that expect an authorization token to perform the authorization."""

    # Fix: the docstring above was previously placed *after* this class
    # attribute, making it a dead string expression (TokenSigner.__doc__
    # was None). A docstring must be the first statement in the class.
    REQUIRES_TOKEN = True

    def __init__(self, auth_token):
        # The token object is stored as-is; subclasses decide how to
        # attach it to a request.
        self.auth_token = auth_token
+
+
class SigV2Auth(BaseSigner):
    """
    Sign a request with Signature V2.
    """

    def __init__(self, credentials):
        # credentials must expose access_key, secret_key and (optionally)
        # a session token.
        self.credentials = credentials

    def calc_signature(self, request, params):
        # Build the V2 string-to-sign (method, host, path, sorted query
        # string) and return (query_string, base64_hmac_sha256_signature).
        logger.debug("Calculating signature using v2 auth.")
        split = urlsplit(request.url)
        path = split.path
        if len(path) == 0:
            path = '/'
        string_to_sign = f"{request.method}\n{split.netloc}\n{path}\n"
        lhmac = hmac.new(
            self.credentials.secret_key.encode("utf-8"), digestmod=sha256
        )
        pairs = []
        for key in sorted(params):
            # Any previous signature should not be a part of this
            # one, so we skip that particular key. This prevents
            # issues during retries.
            if key == 'Signature':
                continue
            value = str(params[key])
            # Keys are fully percent-encoded; values keep '-', '_' and '~'.
            quoted_key = quote(key.encode('utf-8'), safe='')
            quoted_value = quote(value.encode('utf-8'), safe='-_~')
            pairs.append(f'{quoted_key}={quoted_value}')
        qs = '&'.join(pairs)
        string_to_sign += qs
        logger.debug('String to sign: %s', string_to_sign)
        lhmac.update(string_to_sign.encode('utf-8'))
        b64 = base64.b64encode(lhmac.digest()).strip().decode('utf-8')
        return (qs, b64)

    def add_auth(self, request):
        # The auth handler is the last thing called in the
        # preparation phase of a prepared request.
        # Because of this we have to parse the query params
        # from the request body so we can update them with
        # the sigv2 auth params.
        if self.credentials is None:
            raise NoCredentialsError()
        if request.data:
            # POST
            params = request.data
        else:
            # GET
            params = request.params
        params['AWSAccessKeyId'] = self.credentials.access_key
        params['SignatureVersion'] = '2'
        params['SignatureMethod'] = 'HmacSHA256'
        params['Timestamp'] = time.strftime(ISO8601, time.gmtime())
        if self.credentials.token:
            params['SecurityToken'] = self.credentials.token
        # Signature is computed over all params set above, then appended.
        qs, signature = self.calc_signature(request, params)
        params['Signature'] = signature
        return request
+
+
class SigV3Auth(BaseSigner):
    # AWS3-HTTPS signing: HMAC-SHA256 over the Date header only, carried
    # in the X-Amzn-Authorization header.
    def __init__(self, credentials):
        self.credentials = credentials

    def add_auth(self, request):
        # Existing headers are deleted first so retries don't accumulate
        # duplicate Date/token/authorization values.
        if self.credentials is None:
            raise NoCredentialsError()
        if 'Date' in request.headers:
            del request.headers['Date']
        request.headers['Date'] = formatdate(usegmt=True)
        if self.credentials.token:
            if 'X-Amz-Security-Token' in request.headers:
                del request.headers['X-Amz-Security-Token']
            request.headers['X-Amz-Security-Token'] = self.credentials.token
        new_hmac = hmac.new(
            self.credentials.secret_key.encode('utf-8'), digestmod=sha256
        )
        # Only the Date header value is signed under AWS3-HTTPS.
        new_hmac.update(request.headers['Date'].encode('utf-8'))
        encoded_signature = encodebytes(new_hmac.digest()).strip()
        signature = (
            f"AWS3-HTTPS AWSAccessKeyId={self.credentials.access_key},"
            f"Algorithm=HmacSHA256,Signature={encoded_signature.decode('utf-8')}"
        )
        if 'X-Amzn-Authorization' in request.headers:
            del request.headers['X-Amzn-Authorization']
        request.headers['X-Amzn-Authorization'] = signature
+
+
+class SigV4Auth(BaseSigner):
+ """
+ Sign a request with Signature V4.
+ """
+
+ REQUIRES_REGION = True
+
    def __init__(self, credentials, service_name, region_name):
        self.credentials = credentials
        # We initialize these values here so the unit tests can have
        # valid values. But these will get overridden in ``add_auth``
        # later for real requests.
        self._region_name = region_name
        self._service_name = service_name
+
+ def _sign(self, key, msg, hex=False):
+ if hex:
+ sig = hmac.new(key, msg.encode('utf-8'), sha256).hexdigest()
+ else:
+ sig = hmac.new(key, msg.encode('utf-8'), sha256).digest()
+ return sig
+
+ def headers_to_sign(self, request):
+ """
+ Select the headers from the request that need to be included
+ in the StringToSign.
+ """
+ header_map = HTTPHeaders()
+ for name, value in request.headers.items():
+ lname = name.lower()
+ if lname not in SIGNED_HEADERS_BLACKLIST:
+ header_map[lname] = value
+ if 'host' not in header_map:
+ # TODO: We should set the host ourselves, instead of relying on our
+ # HTTP client to set it for us.
+ header_map['host'] = _host_from_url(request.url)
+ return header_map
+
+ def canonical_query_string(self, request):
+ # The query string can come from two parts. One is the
+ # params attribute of the request. The other is from the request
+ # url (in which case we have to re-split the url into its components
+ # and parse out the query string component).
+ if request.params:
+ return self._canonical_query_string_params(request.params)
+ else:
+ return self._canonical_query_string_url(urlsplit(request.url))
+
+ def _canonical_query_string_params(self, params):
+ # [(key, value), (key2, value2)]
+ key_val_pairs = []
+ if isinstance(params, Mapping):
+ params = params.items()
+ for key, value in params:
+ key_val_pairs.append(
+ (quote(key, safe='-_.~'), quote(str(value), safe='-_.~'))
+ )
+ sorted_key_vals = []
+ # Sort by the URI-encoded key names, and in the case of
+ # repeated keys, then sort by the value.
+ for key, value in sorted(key_val_pairs):
+ sorted_key_vals.append(f'{key}={value}')
+ canonical_query_string = '&'.join(sorted_key_vals)
+ return canonical_query_string
+
+ def _canonical_query_string_url(self, parts):
+ canonical_query_string = ''
+ if parts.query:
+ # [(key, value), (key2, value2)]
+ key_val_pairs = []
+ for pair in parts.query.split('&'):
+ key, _, value = pair.partition('=')
+ key_val_pairs.append((key, value))
+ sorted_key_vals = []
+ # Sort by the URI-encoded key names, and in the case of
+ # repeated keys, then sort by the value.
+ for key, value in sorted(key_val_pairs):
+ sorted_key_vals.append(f'{key}={value}')
+ canonical_query_string = '&'.join(sorted_key_vals)
+ return canonical_query_string
+
+ def canonical_headers(self, headers_to_sign):
+ """
+ Return the headers that need to be included in the StringToSign
+ in their canonical form by converting all header keys to lower
+ case, sorting them in alphabetical order and then joining
+ them into a string, separated by newlines.
+ """
+ headers = []
+ sorted_header_names = sorted(set(headers_to_sign))
+ for key in sorted_header_names:
+ value = ','.join(
+ self._header_value(v) for v in headers_to_sign.get_all(key)
+ )
+ headers.append(f'{key}:{ensure_unicode(value)}')
+ return '\n'.join(headers)
+
    def _header_value(self, value):
        # From the sigv4 docs:
        #   Lowercase(HeaderName) + ':' + Trimall(HeaderValue)
        #
        # The Trimall function removes excess white space before and after
        # values, and converts sequential spaces to a single space.
        # str.split() with no args splits on any whitespace run, so joining
        # with single spaces implements Trimall exactly.
        return ' '.join(value.split())
+
+ def signed_headers(self, headers_to_sign):
+ headers = sorted(n.lower().strip() for n in set(headers_to_sign))
+ return ';'.join(headers)
+
+ def _is_streaming_checksum_payload(self, request):
+ checksum_context = request.context.get('checksum', {})
+ algorithm = checksum_context.get('request_algorithm')
+ return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer'
+
    def payload(self, request):
        # Return the payload hash (or sentinel) for the canonical request:
        # a streaming-trailer sentinel, the unsigned-payload sentinel, or
        # the hex SHA-256 of the body.
        if self._is_streaming_checksum_payload(request):
            return STREAMING_UNSIGNED_PAYLOAD_TRAILER
        elif not self._should_sha256_sign_payload(request):
            # When payload signing is disabled, we use this static string in
            # place of the payload checksum.
            return UNSIGNED_PAYLOAD
        request_body = request.body
        if request_body and hasattr(request_body, 'seek'):
            # File-like body: hash it in chunks and restore the stream
            # position afterwards so the body can still be sent.
            position = request_body.tell()
            read_chunksize = functools.partial(
                request_body.read, PAYLOAD_BUFFER
            )
            checksum = sha256()
            for chunk in iter(read_chunksize, b''):
                checksum.update(chunk)
            hex_checksum = checksum.hexdigest()
            request_body.seek(position)
            return hex_checksum
        elif request_body:
            # The request serialization has ensured that
            # request.body is a bytes() type.
            return sha256(request_body).hexdigest()
        else:
            # No body: hash of the empty string.
            return EMPTY_SHA256_HASH
+
+ def _should_sha256_sign_payload(self, request):
+ # Payloads will always be signed over insecure connections.
+ if not request.url.startswith('https'):
+ return True
+
+ # Certain operations may have payload signing disabled by default.
+ # Since we don't have access to the operation model, we pass in this
+ # bit of metadata through the request context.
+ return request.context.get('payload_signing_enabled', True)
+
def canonical_request(self, request):
    """Assemble the SigV4 canonical request string.

    The order is mandated by the SigV4 spec: HTTP method, normalized
    path, canonical query string, canonical headers (with a trailing
    blank line), signed-header list, then the payload hash.
    """
    cr = [request.method.upper()]
    path = self._normalize_url_path(urlsplit(request.url).path)
    cr.append(path)
    cr.append(self.canonical_query_string(request))
    headers_to_sign = self.headers_to_sign(request)
    cr.append(self.canonical_headers(headers_to_sign) + '\n')
    cr.append(self.signed_headers(headers_to_sign))
    # Reuse a body hash already placed on the request (e.g. by a
    # subclass or a prior step); otherwise compute it now.
    if 'X-Amz-Content-SHA256' in request.headers:
        body_checksum = request.headers['X-Amz-Content-SHA256']
    else:
        body_checksum = self.payload(request)
    cr.append(body_checksum)
    return '\n'.join(cr)
+
def _normalize_url_path(self, path):
    """Normalize and percent-encode the URL path for the canonical request.

    '/' and '~' are left unescaped, per RFC 3986 and the SigV4 spec.
    S3 subclasses override this to skip normalization entirely.
    """
    normalized_path = quote(normalize_url_path(path), safe='/~')
    return normalized_path
+
def scope(self, request):
    """Return the full scope string used in the Credential element.

    Format: ``<access_key>/<date>/<region>/<service>/aws4_request``.
    The date/region/service tail is delegated to ``credential_scope``
    so the two methods cannot drift apart (they previously duplicated
    the same component list).
    """
    return '/'.join(
        [self.credentials.access_key, self.credential_scope(request)]
    )
+
def credential_scope(self, request):
    """Return the credential scope: ``<date>/<region>/<service>/aws4_request``."""
    # The date component is the first 8 chars (YYYYMMDD) of the
    # ISO-basic timestamp stored on the request context.
    date_stamp = request.context['timestamp'][0:8]
    components = (
        date_stamp,
        self._region_name,
        self._service_name,
        'aws4_request',
    )
    return '/'.join(components)
+
def string_to_sign(self, request, canonical_request):
    """
    Return the SigV4 StringToSign: the algorithm name, the request
    timestamp, the credential scope, and the hex SHA-256 digest of the
    canonical request, joined by newlines.
    """
    sts = ['AWS4-HMAC-SHA256']
    sts.append(request.context['timestamp'])
    sts.append(self.credential_scope(request))
    sts.append(sha256(canonical_request.encode('utf-8')).hexdigest())
    return '\n'.join(sts)
+
def signature(self, string_to_sign, request):
    """Derive the SigV4 signing key and sign the StringToSign.

    Key-derivation chain per the SigV4 spec:
    kSecret -> kDate -> kRegion -> kService -> kSigning, each step an
    HMAC-SHA256 keyed with the previous result; the final HMAC is
    returned hex-encoded.
    """
    key = self.credentials.secret_key
    k_date = self._sign(
        (f"AWS4{key}").encode(), request.context["timestamp"][0:8]
    )
    k_region = self._sign(k_date, self._region_name)
    k_service = self._sign(k_region, self._service_name)
    k_signing = self._sign(k_service, 'aws4_request')
    return self._sign(k_signing, string_to_sign, hex=True)
+
def add_auth(self, request):
    """Sign ``request`` in place using Signature Version 4.

    Stamps the request context with the signing timestamp, prepares
    headers, builds the canonical request / StringToSign, and injects
    the resulting signature. Raises NoCredentialsError when no
    credentials are configured.
    """
    if self.credentials is None:
        raise NoCredentialsError()
    datetime_now = get_current_datetime()
    request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)
    # This could be a retry. Make sure the previous
    # authorization header is removed first.
    self._modify_request_before_signing(request)
    canonical_request = self.canonical_request(request)
    logger.debug("Calculating signature using v4 auth.")
    logger.debug('CanonicalRequest:\n%s', canonical_request)
    string_to_sign = self.string_to_sign(request, canonical_request)
    logger.debug('StringToSign:\n%s', string_to_sign)
    signature = self.signature(string_to_sign, request)
    logger.debug('Signature:\n%s', signature)

    self._inject_signature_to_request(request, signature)
+
def _inject_signature_to_request(self, request, signature):
    """Build and set the Authorization header from the computed signature.

    Header format: ``AWS4-HMAC-SHA256 Credential=<scope>,
    SignedHeaders=<names>, Signature=<hex>``.
    """
    auth_str = [f'AWS4-HMAC-SHA256 Credential={self.scope(request)}']
    headers_to_sign = self.headers_to_sign(request)
    auth_str.append(
        f"SignedHeaders={self.signed_headers(headers_to_sign)}"
    )
    auth_str.append(f'Signature={signature}')
    request.headers['Authorization'] = ', '.join(auth_str)
    return request
+
def _modify_request_before_signing(self, request):
    """Prepare the request headers prior to signing.

    Removes any stale Authorization header (retries), sets the date
    header, and (re)sets the security token and unsigned-payload
    headers as needed.
    """
    if 'Authorization' in request.headers:
        del request.headers['Authorization']
    self._set_necessary_date_headers(request)
    if self.credentials.token:
        # Delete-then-set avoids duplicate entries, since the header
        # container aggregates repeated assignments to the same key.
        if 'X-Amz-Security-Token' in request.headers:
            del request.headers['X-Amz-Security-Token']
        request.headers['X-Amz-Security-Token'] = self.credentials.token

    if not request.context.get('payload_signing_enabled', True):
        if 'X-Amz-Content-SHA256' in request.headers:
            del request.headers['X-Amz-Content-SHA256']
        request.headers['X-Amz-Content-SHA256'] = UNSIGNED_PAYLOAD
+
def _set_necessary_date_headers(self, request):
    """Refresh the date header from the context timestamp.

    The spec allows either ``Date`` or ``X-Amz-Date``; whichever one is
    already present is rewritten from the signing timestamp and the
    other is removed, so exactly one date header ends up on the request.
    """
    # The spec allows for either the Date _or_ the X-Amz-Date value to be
    # used so we check both. If there's a Date header, we use the date
    # header. Otherwise we use the X-Amz-Date header.
    if 'Date' in request.headers:
        del request.headers['Date']
        datetime_timestamp = datetime.datetime.strptime(
            request.context['timestamp'], SIGV4_TIMESTAMP
        )
        # Date header uses RFC 2822 format, so convert from the
        # SigV4 ISO-basic timestamp.
        request.headers['Date'] = formatdate(
            int(calendar.timegm(datetime_timestamp.timetuple()))
        )
        if 'X-Amz-Date' in request.headers:
            del request.headers['X-Amz-Date']
    else:
        if 'X-Amz-Date' in request.headers:
            del request.headers['X-Amz-Date']
        request.headers['X-Amz-Date'] = request.context['timestamp']
+
+
class S3SigV4Auth(SigV4Auth):
    """SigV4 signer with S3-specific behavior.

    S3 always sends an ``X-Amz-Content-SHA256`` header, supports
    optionally disabling payload signing, and does not normalize URL
    paths.
    """

    def _modify_request_before_signing(self, request):
        super()._modify_request_before_signing(request)
        # Delete-then-set so the header container doesn't aggregate a
        # duplicate value from a previous signing attempt.
        if 'X-Amz-Content-SHA256' in request.headers:
            del request.headers['X-Amz-Content-SHA256']

        request.headers['X-Amz-Content-SHA256'] = self.payload(request)

    def _should_sha256_sign_payload(self, request):
        # S3 allows optional body signing, so to minimize the performance
        # impact, we opt to not SHA256 sign the body on streaming uploads,
        # provided that we're on https.
        client_config = request.context.get('client_config')
        s3_config = getattr(client_config, 's3', None)

        # The config could be None if it isn't set, or if the customer sets it
        # to None.
        if s3_config is None:
            s3_config = {}

        # The explicit configuration takes precedence over any implicit
        # configuration.
        sign_payload = s3_config.get('payload_signing_enabled', None)
        if sign_payload is not None:
            return sign_payload

        # We require that both a checksum be present and https be enabled
        # to implicitly disable body signing. The combination of TLS and
        # a checksum is sufficiently secure and durable for us to be
        # confident in the request without body signing.
        checksum_header = 'Content-MD5'
        checksum_context = request.context.get('checksum', {})
        algorithm = checksum_context.get('request_algorithm')
        if isinstance(algorithm, dict) and algorithm.get('in') == 'header':
            checksum_header = algorithm['name']
        if (
            not request.url.startswith("https")
            or checksum_header not in request.headers
        ):
            return True

        # If the input is streaming we disable body signing by default.
        if request.context.get('has_streaming_input', False):
            return False

        # If the S3-specific checks had no results, delegate to the generic
        # checks.
        return super()._should_sha256_sign_payload(request)

    def _normalize_url_path(self, path):
        # For S3, we do not normalize the path.
        return path
+
+
class S3ExpressAuth(S3SigV4Auth):
    """SigV4 signer for S3 Express One Zone (directory buckets).

    Uses the ``x-amz-s3session-token`` header instead of the STS
    security token, with session credentials supplied via an identity
    cache.
    """

    # Signals to the signing machinery that session credentials must be
    # obtained through an identity cache.
    REQUIRES_IDENTITY_CACHE = True

    def __init__(
        self, credentials, service_name, region_name, *, identity_cache
    ):
        super().__init__(credentials, service_name, region_name)
        self._identity_cache = identity_cache

    def add_auth(self, request):
        super().add_auth(request)

    def _modify_request_before_signing(self, request):
        super()._modify_request_before_signing(request)
        # Only set the session token if a caller hasn't already done so.
        if 'x-amz-s3session-token' not in request.headers:
            request.headers['x-amz-s3session-token'] = self.credentials.token
        # S3Express does not support STS' X-Amz-Security-Token
        if 'X-Amz-Security-Token' in request.headers:
            del request.headers['X-Amz-Security-Token']
+
+
class S3ExpressPostAuth(S3ExpressAuth):
    """Presigned-POST signer for S3 Express One Zone.

    Instead of signing an HTTP request, this populates the POST form
    fields and the base64-encoded POST policy on the request context,
    signing the policy document itself.
    """

    REQUIRES_IDENTITY_CACHE = True

    def add_auth(self, request):
        datetime_now = get_current_datetime()
        request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)

        # Start from caller-provided fields/policy when present so any
        # user-specified form values and conditions are preserved.
        fields = {}
        if request.context.get('s3-presign-post-fields', None) is not None:
            fields = request.context['s3-presign-post-fields']

        policy = {}
        conditions = []
        if request.context.get('s3-presign-post-policy', None) is not None:
            policy = request.context['s3-presign-post-policy']
            if policy.get('conditions', None) is not None:
                conditions = policy['conditions']

        policy['conditions'] = conditions

        # Every signing-related field must also appear as a policy
        # condition, or S3 rejects the POST.
        fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
        fields['x-amz-credential'] = self.scope(request)
        fields['x-amz-date'] = request.context['timestamp']

        conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'})
        conditions.append({'x-amz-credential': self.scope(request)})
        conditions.append({'x-amz-date': request.context['timestamp']})

        if self.credentials.token is not None:
            fields['X-Amz-S3session-Token'] = self.credentials.token
            conditions.append(
                {'X-Amz-S3session-Token': self.credentials.token}
            )

        # Dump the base64 encoded policy into the fields dictionary.
        fields['policy'] = base64.b64encode(
            json.dumps(policy).encode('utf-8')
        ).decode('utf-8')

        # The signature is computed over the encoded policy document.
        fields['x-amz-signature'] = self.signature(fields['policy'], request)

        request.context['s3-presign-post-fields'] = fields
        request.context['s3-presign-post-policy'] = policy
+
+
class S3ExpressQueryAuth(S3ExpressAuth):
    """Presigned-URL (query-string) signer for S3 Express One Zone.

    Moves all auth parameters into the query string instead of headers,
    producing a URL that can be used without further signing.
    """

    # Default URL lifetime, in seconds.
    DEFAULT_EXPIRES = 300
    REQUIRES_IDENTITY_CACHE = True

    def __init__(
        self,
        credentials,
        service_name,
        region_name,
        *,
        identity_cache,
        expires=DEFAULT_EXPIRES,
    ):
        super().__init__(
            credentials,
            service_name,
            region_name,
            identity_cache=identity_cache,
        )
        self._expires = expires

    def _modify_request_before_signing(self, request):
        # We automatically set this header, so if it's the auto-set value we
        # want to get rid of it since it doesn't make sense for presigned urls.
        content_type = request.headers.get('content-type')
        blocklisted_content_type = (
            'application/x-www-form-urlencoded; charset=utf-8'
        )
        if content_type == blocklisted_content_type:
            del request.headers['content-type']

        # Note that we're not including X-Amz-Signature.
        # From the docs: "The Canonical Query String must include all the query
        # parameters from the preceding table except for X-Amz-Signature.
        signed_headers = self.signed_headers(self.headers_to_sign(request))

        auth_params = {
            'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
            'X-Amz-Credential': self.scope(request),
            'X-Amz-Date': request.context['timestamp'],
            'X-Amz-Expires': self._expires,
            'X-Amz-SignedHeaders': signed_headers,
        }
        if self.credentials.token is not None:
            # S3 Express uses its session token param, not the STS one.
            auth_params['X-Amz-S3session-Token'] = self.credentials.token
        # Now parse the original query string to a dict, inject our new query
        # params, and serialize back to a query string.
        url_parts = urlsplit(request.url)
        # parse_qs makes each value a list, but in our case we know we won't
        # have repeated keys so we know we have single element lists which we
        # can convert back to scalar values.
        query_string_parts = parse_qs(url_parts.query, keep_blank_values=True)
        query_dict = {k: v[0] for k, v in query_string_parts.items()}

        if request.params:
            query_dict.update(request.params)
            request.params = {}
        # The spec is particular about this. It *has* to be:
        # https://<endpoint>?<operation params>&<auth params>
        # You can't mix the two types of params together, i.e just keep doing
        # new_query_params.update(op_params)
        # new_query_params.update(auth_params)
        # percent_encode_sequence(new_query_params)
        operation_params = ''
        if request.data:
            # We also need to move the body params into the query string. To
            # do this, we first have to convert it to a dict.
            query_dict.update(_get_body_as_dict(request))
            request.data = ''
        if query_dict:
            operation_params = percent_encode_sequence(query_dict) + '&'
        new_query_string = (
            f"{operation_params}{percent_encode_sequence(auth_params)}"
        )
        # url_parts is a tuple (and therefore immutable) so we need to create
        # a new url_parts with the new query string.
        # <part>   - <index>
        # scheme   - 0
        # netloc   - 1
        # path     - 2
        # query    - 3 <-- we're replacing this.
        # fragment - 4
        p = url_parts
        new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
        request.url = urlunsplit(new_url_parts)

    def _inject_signature_to_request(self, request, signature):
        # Rather than calculating an "Authorization" header, for the query
        # param auth, we just append an 'X-Amz-Signature' param to the end
        # of the query string.
        request.url += f'&X-Amz-Signature={signature}'

    def _normalize_url_path(self, path):
        # For S3, we do not normalize the path.
        return path

    def payload(self, request):
        # From the doc link above:
        # "You don't include a payload hash in the Canonical Request, because
        # when you create a presigned URL, you don't know anything about the
        # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD".
        return UNSIGNED_PAYLOAD
+
+
class SigV4QueryAuth(SigV4Auth):
    """SigV4 signer that signs via query-string parameters (presigned URLs)."""

    # Default URL lifetime, in seconds.
    DEFAULT_EXPIRES = 3600

    def __init__(
        self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES
    ):
        super().__init__(credentials, service_name, region_name)
        self._expires = expires

    def _modify_request_before_signing(self, request):
        # We automatically set this header, so if it's the auto-set value we
        # want to get rid of it since it doesn't make sense for presigned urls.
        content_type = request.headers.get('content-type')
        blacklisted_content_type = (
            'application/x-www-form-urlencoded; charset=utf-8'
        )
        if content_type == blacklisted_content_type:
            del request.headers['content-type']

        # Note that we're not including X-Amz-Signature.
        # From the docs: "The Canonical Query String must include all the query
        # parameters from the preceding table except for X-Amz-Signature.
        signed_headers = self.signed_headers(self.headers_to_sign(request))

        auth_params = {
            'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
            'X-Amz-Credential': self.scope(request),
            'X-Amz-Date': request.context['timestamp'],
            'X-Amz-Expires': self._expires,
            'X-Amz-SignedHeaders': signed_headers,
        }
        if self.credentials.token is not None:
            auth_params['X-Amz-Security-Token'] = self.credentials.token
        # Now parse the original query string to a dict, inject our new query
        # params, and serialize back to a query string.
        url_parts = urlsplit(request.url)
        # parse_qs makes each value a list, but in our case we know we won't
        # have repeated keys so we know we have single element lists which we
        # can convert back to scalar values.
        query_string_parts = parse_qs(url_parts.query, keep_blank_values=True)
        query_dict = {k: v[0] for k, v in query_string_parts.items()}

        if request.params:
            query_dict.update(request.params)
            request.params = {}
        # The spec is particular about this. It *has* to be:
        # https://<endpoint>?<operation params>&<auth params>
        # You can't mix the two types of params together, i.e just keep doing
        # new_query_params.update(op_params)
        # new_query_params.update(auth_params)
        # percent_encode_sequence(new_query_params)
        operation_params = ''
        if request.data:
            # We also need to move the body params into the query string. To
            # do this, we first have to convert it to a dict.
            query_dict.update(_get_body_as_dict(request))
            request.data = ''
        if query_dict:
            operation_params = percent_encode_sequence(query_dict) + '&'
        new_query_string = (
            f"{operation_params}{percent_encode_sequence(auth_params)}"
        )
        # url_parts is a tuple (and therefore immutable) so we need to create
        # a new url_parts with the new query string.
        # <part>   - <index>
        # scheme   - 0
        # netloc   - 1
        # path     - 2
        # query    - 3 <-- we're replacing this.
        # fragment - 4
        p = url_parts
        new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
        request.url = urlunsplit(new_url_parts)

    def _inject_signature_to_request(self, request, signature):
        # Rather than calculating an "Authorization" header, for the query
        # param auth, we just append an 'X-Amz-Signature' param to the end
        # of the query string.
        request.url += f'&X-Amz-Signature={signature}'
+
+
class S3SigV4QueryAuth(SigV4QueryAuth):
    """S3 SigV4 auth using query parameters.

    This signer will sign a request using query parameters and signature
    version 4, i.e a "presigned url" signer.

    Based off of:

    http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html

    """

    def _normalize_url_path(self, path):
        # For S3, we do not normalize the path.
        return path

    def payload(self, request):
        # From the doc link above:
        # "You don't include a payload hash in the Canonical Request, because
        # when you create a presigned URL, you don't know anything about the
        # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD".
        return UNSIGNED_PAYLOAD
+
+
class S3SigV4PostAuth(SigV4Auth):
    """
    Presigns a s3 post

    Implementation doc here:
    http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-UsingHTTPPOST.html
    """

    def add_auth(self, request):
        datetime_now = get_current_datetime()
        request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP)

        # Start from caller-provided fields/policy when present so any
        # user-specified form values and conditions are preserved.
        fields = {}
        if request.context.get('s3-presign-post-fields', None) is not None:
            fields = request.context['s3-presign-post-fields']

        policy = {}
        conditions = []
        if request.context.get('s3-presign-post-policy', None) is not None:
            policy = request.context['s3-presign-post-policy']
            if policy.get('conditions', None) is not None:
                conditions = policy['conditions']

        policy['conditions'] = conditions

        # Every signing-related field must also appear as a policy
        # condition, or S3 rejects the POST.
        fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
        fields['x-amz-credential'] = self.scope(request)
        fields['x-amz-date'] = request.context['timestamp']

        conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'})
        conditions.append({'x-amz-credential': self.scope(request)})
        conditions.append({'x-amz-date': request.context['timestamp']})

        if self.credentials.token is not None:
            fields['x-amz-security-token'] = self.credentials.token
            conditions.append({'x-amz-security-token': self.credentials.token})

        # Dump the base64 encoded policy into the fields dictionary.
        fields['policy'] = base64.b64encode(
            json.dumps(policy).encode('utf-8')
        ).decode('utf-8')

        # The signature is computed over the encoded policy document.
        fields['x-amz-signature'] = self.signature(fields['policy'], request)

        request.context['s3-presign-post-fields'] = fields
        request.context['s3-presign-post-policy'] = policy
+
+
class HmacV1Auth(BaseSigner):
    """Legacy S3 signing (AWS Signature Version 2 for S3, a.k.a. HMAC-SHA1).

    Signs requests with ``AWS <access_key>:<base64 hmac-sha1>`` in the
    Authorization header, computed over a canonical string of method,
    standard headers, x-amz-* headers, and the canonical resource.
    """

    # List of Query String Arguments of Interest: sub-resource and
    # response-override params that must be included in the canonical
    # resource; everything else in the query string is ignored.
    QSAOfInterest = [
        'accelerate',
        'acl',
        'cors',
        'defaultObjectAcl',
        'location',
        'logging',
        'partNumber',
        'policy',
        'requestPayment',
        'torrent',
        'versioning',
        'versionId',
        'versions',
        'website',
        'uploads',
        'uploadId',
        'response-content-type',
        'response-content-language',
        'response-expires',
        'response-cache-control',
        'response-content-disposition',
        'response-content-encoding',
        'delete',
        'lifecycle',
        'tagging',
        'restore',
        'storageClass',
        'notification',
        'replication',
        'requestPayment',
        'analytics',
        'metrics',
        'inventory',
        'select',
        'select-type',
        'object-lock',
    ]

    def __init__(self, credentials, service_name=None, region_name=None):
        # service_name/region_name are accepted for interface parity with
        # the SigV4 signers but are not used by HMAC-V1 signing.
        self.credentials = credentials

    def sign_string(self, string_to_sign):
        """Return the base64-encoded HMAC-SHA1 of ``string_to_sign``."""
        new_hmac = hmac.new(
            self.credentials.secret_key.encode('utf-8'), digestmod=sha1
        )
        new_hmac.update(string_to_sign.encode('utf-8'))
        return encodebytes(new_hmac.digest()).strip().decode('utf-8')

    def canonical_standard_headers(self, headers):
        """Return the content-md5/content-type/date lines of the canonical string.

        Missing headers contribute an empty line; the Date header is
        (re)set from ``_get_date`` before canonicalization.
        """
        interesting_headers = ['content-md5', 'content-type', 'date']
        hoi = []
        # Delete-then-set avoids duplicate entries in the aggregating
        # header container.
        if 'Date' in headers:
            del headers['Date']
        headers['Date'] = self._get_date()
        for ih in interesting_headers:
            found = False
            for key in headers:
                lk = key.lower()
                if headers[key] is not None and lk == ih:
                    hoi.append(headers[key].strip())
                    found = True
            if not found:
                hoi.append('')
        return '\n'.join(hoi)

    def canonical_custom_headers(self, headers):
        """Return the sorted ``x-amz-*`` header lines of the canonical string.

        Repeated headers are joined with commas, per the spec.
        """
        hoi = []
        custom_headers = {}
        for key in headers:
            lk = key.lower()
            if headers[key] is not None:
                if lk.startswith('x-amz-'):
                    custom_headers[lk] = ','.join(
                        v.strip() for v in headers.get_all(key)
                    )
        sorted_header_keys = sorted(custom_headers.keys())
        for key in sorted_header_keys:
            hoi.append(f"{key}:{custom_headers[key]}")
        return '\n'.join(hoi)

    def unquote_v(self, nv):
        """
        TODO: Do we need this?
        """
        # nv is a split query param: either [name] or [name, value];
        # only the value part gets unquoted.
        if len(nv) == 1:
            return nv
        else:
            return (nv[0], unquote(nv[1]))

    def canonical_resource(self, split, auth_path=None):
        """Return the canonical resource (path plus interesting sub-resources)."""
        # don't include anything after the first ? in the resource...
        # unless it is one of the QSA of interest, defined above
        # NOTE:
        # The path in the canonical resource should always be the
        # full path including the bucket name, even for virtual-hosting
        # style addressing. The ``auth_path`` keeps track of the full
        # path for the canonical resource and would be passed in if
        # the client was using virtual-hosting style.
        if auth_path is not None:
            buf = auth_path
        else:
            buf = split.path
        if split.query:
            qsa = split.query.split('&')
            qsa = [a.split('=', 1) for a in qsa]
            qsa = [
                self.unquote_v(a) for a in qsa if a[0] in self.QSAOfInterest
            ]
            if len(qsa) > 0:
                qsa.sort(key=itemgetter(0))
                qsa = ['='.join(a) for a in qsa]
                buf += '?'
                buf += '&'.join(qsa)
        return buf

    def canonical_string(
        self, method, split, headers, expires=None, auth_path=None
    ):
        """Assemble the full string-to-sign for HMAC-V1."""
        cs = method.upper() + '\n'
        cs += self.canonical_standard_headers(headers) + '\n'
        custom_headers = self.canonical_custom_headers(headers)
        if custom_headers:
            cs += custom_headers + '\n'
        cs += self.canonical_resource(split, auth_path=auth_path)
        return cs

    def get_signature(
        self, method, split, headers, expires=None, auth_path=None
    ):
        """Compute the request signature; sets the security token header first."""
        if self.credentials.token:
            # Delete-then-set avoids duplicate entries in the
            # aggregating header container.
            del headers['x-amz-security-token']
            headers['x-amz-security-token'] = self.credentials.token
        string_to_sign = self.canonical_string(
            method, split, headers, auth_path=auth_path
        )
        logger.debug('StringToSign:\n%s', string_to_sign)
        return self.sign_string(string_to_sign)

    def add_auth(self, request):
        """Sign ``request`` in place; raises NoCredentialsError without credentials."""
        if self.credentials is None:
            raise NoCredentialsError
        logger.debug("Calculating signature using hmacv1 auth.")
        split = urlsplit(request.url)
        logger.debug("HTTP request method: %s", request.method)
        signature = self.get_signature(
            request.method, split, request.headers, auth_path=request.auth_path
        )
        self._inject_signature(request, signature)

    def _get_date(self):
        # RFC 2822 date in GMT; subclasses override (e.g. expiry time
        # for query-string auth).
        return formatdate(usegmt=True)

    def _inject_signature(self, request, signature):
        if 'Authorization' in request.headers:
            # We have to do this because request.headers is not
            # normal dictionary. It has the (unintuitive) behavior
            # of aggregating repeated setattr calls for the same
            # key value. For example:
            # headers['foo'] = 'a'; headers['foo'] = 'b'
            # list(headers) will print ['foo', 'foo'].
            del request.headers['Authorization']

        auth_header = f"AWS {self.credentials.access_key}:{signature}"
        request.headers['Authorization'] = auth_header
+
+
class HmacV1QueryAuth(HmacV1Auth):
    """
    Generates a presigned request for s3.

    Spec from this document:

    http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html
    #RESTAuthenticationQueryStringAuth

    """

    # Default URL lifetime, in seconds.
    DEFAULT_EXPIRES = 3600

    def __init__(self, credentials, expires=DEFAULT_EXPIRES):
        self.credentials = credentials
        self._expires = expires

    def _get_date(self):
        # For query auth the "date" slot in the canonical string holds
        # the absolute expiry time as a unix timestamp.
        return str(int(time.time() + int(self._expires)))

    def _inject_signature(self, request, signature):
        """Place the access key, signature, and relevant headers in the query string."""
        query_dict = {}
        query_dict['AWSAccessKeyId'] = self.credentials.access_key
        query_dict['Signature'] = signature

        for header_key in request.headers:
            lk = header_key.lower()
            # For query string requests, Expires is used instead of the
            # Date header.
            if header_key == 'Date':
                query_dict['Expires'] = request.headers['Date']
            # We only want to include relevant headers in the query string.
            # These can be anything that starts with x-amz, is Content-MD5,
            # or is Content-Type.
            elif lk.startswith('x-amz-') or lk in (
                'content-md5',
                'content-type',
            ):
                query_dict[lk] = request.headers[lk]
        # Combine all of the identified headers into an encoded
        # query string
        new_query_string = percent_encode_sequence(query_dict)

        # Create a new url with the presigned url.
        p = urlsplit(request.url)
        if p[3]:
            # If there was a pre-existing query string, we should
            # add that back before injecting the new query string.
            new_query_string = f'{p[3]}&{new_query_string}'
        new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
        request.url = urlunsplit(new_url_parts)
+
+
class HmacV1PostAuth(HmacV1Auth):
    """
    Generates a presigned post for s3.

    Spec from this document:

    http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingHTTPPOST.html
    """

    def add_auth(self, request):
        # Start from caller-provided fields/policy when present so any
        # user-specified form values and conditions are preserved.
        fields = {}
        if request.context.get('s3-presign-post-fields', None) is not None:
            fields = request.context['s3-presign-post-fields']

        policy = {}
        conditions = []
        if request.context.get('s3-presign-post-policy', None) is not None:
            policy = request.context['s3-presign-post-policy']
            if policy.get('conditions', None) is not None:
                conditions = policy['conditions']

        policy['conditions'] = conditions

        fields['AWSAccessKeyId'] = self.credentials.access_key

        if self.credentials.token is not None:
            # The token must appear both as a form field and as a policy
            # condition, or S3 rejects the POST.
            fields['x-amz-security-token'] = self.credentials.token
            conditions.append({'x-amz-security-token': self.credentials.token})

        # Dump the base64 encoded policy into the fields dictionary.
        fields['policy'] = base64.b64encode(
            json.dumps(policy).encode('utf-8')
        ).decode('utf-8')

        # The signature is computed over the encoded policy document.
        fields['signature'] = self.sign_string(fields['policy'])

        request.context['s3-presign-post-fields'] = fields
        request.context['s3-presign-post-policy'] = policy
+
+
class BearerAuth(TokenSigner):
    """
    Performs bearer token authorization by placing the bearer token in the
    Authorization header as specified by Section 2.1 of RFC 6750.

    https://datatracker.ietf.org/doc/html/rfc6750#section-2.1
    """

    def add_auth(self, request):
        if self.auth_token is None:
            raise NoAuthTokenError()

        # Delete-then-set avoids a duplicate Authorization entry, since
        # the header container aggregates repeated assignments.
        header_value = f'Bearer {self.auth_token.token}'
        headers = request.headers
        if 'Authorization' in headers:
            del headers['Authorization']
        headers['Authorization'] = header_value
+
+
def resolve_auth_type(auth_trait):
    """Resolve the first usable signature version from a Smithy auth trait.

    ``noAuth`` short-circuits immediately. A recognized auth type whose
    signature version is not registered in AUTH_TYPE_MAPS raises
    UnknownSignatureVersionError; if nothing in the trait is recognized,
    UnsupportedSignatureVersionError is raised.
    """
    for auth_type in auth_trait:
        if auth_type == 'smithy.api#noAuth':
            return AUTH_TYPE_TO_SIGNATURE_VERSION[auth_type]
        if auth_type not in AUTH_TYPE_TO_SIGNATURE_VERSION:
            continue
        signature_version = AUTH_TYPE_TO_SIGNATURE_VERSION[auth_type]
        if signature_version not in AUTH_TYPE_MAPS:
            raise UnknownSignatureVersionError(signature_version=auth_type)
        return signature_version
    raise UnsupportedSignatureVersionError(signature_version=auth_trait)
+
+
def resolve_auth_scheme_preference(preference_list, auth_options):
    """Pick a signature version honoring the user's auth-scheme preference.

    Candidates are ordered by the user's preference list first, then the
    service's advertised options; anything the service does not support
    is dropped. Raises UnsupportedSignatureVersionError when no candidate
    resolves to a registered signer.
    """
    # Strip the trait prefix ('aws.auth#sigv4' -> 'sigv4').
    service_supported = [option.split('#')[-1] for option in auth_options]

    unknown_schemes = [
        name
        for name in preference_list
        if name not in AUTH_PREF_TO_SIGNATURE_VERSION
    ]
    if unknown_schemes:
        logger.debug(
            "Unsupported auth schemes in preference list: %r", unknown_schemes
        )

    # dict.fromkeys preserves first-seen order while deduplicating.
    ordered_candidates = dict.fromkeys(preference_list + service_supported)
    for candidate in ordered_candidates:
        if candidate not in service_supported:
            continue
        if candidate == 'noAuth':
            return AUTH_PREF_TO_SIGNATURE_VERSION[candidate]
        resolved = AUTH_PREF_TO_SIGNATURE_VERSION.get(candidate)
        if resolved in AUTH_TYPE_MAPS:
            return resolved

    raise UnsupportedSignatureVersionError(
        signature_version=', '.join(sorted(service_supported))
    )
+
+
# Maps signature-version identifiers (as used in service models and client
# configuration) to the signer classes that implement them. The v4 family
# is added below, conditionally on CRT availability.
AUTH_TYPE_MAPS = {
    'v2': SigV2Auth,
    'v3': SigV3Auth,
    'v3https': SigV3Auth,
    's3': HmacV1Auth,
    's3-query': HmacV1QueryAuth,
    's3-presign-post': HmacV1PostAuth,
    's3v4-presign-post': S3SigV4PostAuth,
    'v4-s3express': S3ExpressAuth,
    'v4-s3express-query': S3ExpressQueryAuth,
    'v4-s3express-presign-post': S3ExpressPostAuth,
    'bearer': BearerAuth,
}

# Define v4 signers depending on if CRT is present
if HAS_CRT:
    from botocore.crt.auth import CRT_AUTH_TYPE_MAPS

    AUTH_TYPE_MAPS.update(CRT_AUTH_TYPE_MAPS)
else:
    AUTH_TYPE_MAPS.update(
        {
            'v4': SigV4Auth,
            'v4-query': SigV4QueryAuth,
            's3v4': S3SigV4Auth,
            's3v4-query': S3SigV4QueryAuth,
        }
    )

# Maps Smithy auth-trait identifiers to botocore signature versions.
AUTH_TYPE_TO_SIGNATURE_VERSION = {
    'aws.auth#sigv4': 'v4',
    'aws.auth#sigv4a': 'v4a',
    'smithy.api#httpBearerAuth': 'bearer',
    'smithy.api#noAuth': 'none',
}

# Mapping used specifically for resolving user-configured auth scheme preferences.
# This is similar to AUTH_TYPE_TO_SIGNATURE_VERSION, but uses simplified keys by
# stripping the auth trait prefixes ('smithy.api#httpBearerAuth' → 'httpBearerAuth').
# These simplified keys match what customers are expected to provide in configuration.
AUTH_PREF_TO_SIGNATURE_VERSION = {
    auth_scheme.split('#')[-1]: sig_version
    for auth_scheme, sig_version in AUTH_TYPE_TO_SIGNATURE_VERSION.items()
}
diff --git a/py311/lib/python3.11/site-packages/botocore/awsrequest.py b/py311/lib/python3.11/site-packages/botocore/awsrequest.py
new file mode 100644
index 0000000000000000000000000000000000000000..06681395a223e950bdce3c010caca2b644fe3fd1
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/awsrequest.py
@@ -0,0 +1,635 @@
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import functools
+import logging
+from collections.abc import Mapping
+
+import urllib3.util
+from urllib3.connection import HTTPConnection, VerifiedHTTPSConnection
+from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+
+import botocore.utils
+from botocore.compat import (
+ HTTPHeaders,
+ HTTPResponse,
+ MutableMapping,
+ urlencode,
+ urlparse,
+ urlsplit,
+ urlunsplit,
+)
+from botocore.exceptions import UnseekableStreamError
+
+logger = logging.getLogger(__name__)
+
+
class AWSHTTPResponse(HTTPResponse):
    """HTTPResponse that can replay a status line read ahead of time.

    Used when the connection already consumed a final (non-100) status
    line while waiting for a ``100 Continue``; the stored tuple is
    handed back on the first ``_read_status`` call. The ``*args``/
    ``**kwargs`` signature is kept because the base-class arguments
    differ across Python versions.
    """

    def __init__(self, *args, **kwargs):
        # Pop our extra keyword before delegating so the base class
        # never sees it.
        self._status_tuple = kwargs.pop('status_tuple')
        HTTPResponse.__init__(self, *args, **kwargs)

    def _read_status(self):
        """Return the stashed status tuple once, then defer to the base."""
        stashed = self._status_tuple
        if stashed is None:
            return HTTPResponse._read_status(self)
        self._status_tuple = None
        return stashed
+
+
class AWSConnection:
    """Mixin for HTTPConnection that supports Expect 100-continue.

    This when mixed with a subclass of httplib.HTTPConnection (though
    technically we subclass from urllib3, which subclasses
    httplib.HTTPConnection) and we only override this class to support Expect
    100-continue, which we need for S3. As far as I can tell, this is
    general purpose enough to not be specific to S3, but I'm being
    tentative and keeping it in botocore because I've only tested
    this against AWS services.

    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Remember the stock response class so close()/request() can
        # restore it after a request that swapped in AWSHTTPResponse.
        self._original_response_cls = self.response_class
        # This variable is set when we receive an early response from the
        # server. If this value is set to True, any calls to send() are noops.
        # This value is reset to false every time _send_request is called.
        # This is to workaround changes in urllib3 2.0 which uses separate
        # send() calls in request() instead of delegating to endheaders(),
        # which is where the body is sent in CPython's HTTPConnection.
        self._response_received = False
        self._expect_header_set = False
        self._send_called = False

    def close(self):
        """Close the connection and reset all 100-continue tracking state."""
        super().close()
        # Reset all of our instance state we were tracking.
        self._response_received = False
        self._expect_header_set = False
        self._send_called = False
        self.response_class = self._original_response_cls

    def request(self, method, url, body=None, headers=None, *args, **kwargs):
        """Issue a request, recording whether Expect: 100-continue is set.

        NOTE(review): the Expect value is compared against the bytes
        literal b'100-continue', so a str-valued header would not enable
        the 100-continue path — confirm callers pass bytes here.
        """
        if headers is None:
            headers = {}
        self._response_received = False
        if headers.get('Expect', b'') == b'100-continue':
            self._expect_header_set = True
        else:
            self._expect_header_set = False
            self.response_class = self._original_response_cls
        rval = super().request(method, url, body, headers, *args, **kwargs)
        # The flag only applies to the request we just sent.
        self._expect_header_set = False
        return rval

    def _convert_to_bytes(self, mixed_buffer):
        """Join str/bytes chunks into one CRLF-separated bytestring."""
        # Take a list of mixed str/bytes and convert it
        # all into a single bytestring.
        # Any str will be encoded as utf-8.
        bytes_buffer = []
        for chunk in mixed_buffer:
            if isinstance(chunk, str):
                bytes_buffer.append(chunk.encode('utf-8'))
            else:
                bytes_buffer.append(chunk)
        msg = b"\r\n".join(bytes_buffer)
        return msg

    def _send_output(self, message_body=None, *args, **kwargs):
        """Flush buffered headers and, depending on Expect, the body."""
        self._buffer.extend((b"", b""))
        msg = self._convert_to_bytes(self._buffer)
        del self._buffer[:]
        # If msg and message_body are sent in a single send() call,
        # it will avoid performance problems caused by the interaction
        # between delayed ack and the Nagle algorithm.
        if isinstance(message_body, bytes):
            msg += message_body
            message_body = None
        self.send(msg)
        if self._expect_header_set:
            # This is our custom behavior. If the Expect header was
            # set, it will trigger this custom behavior.
            logger.debug("Waiting for 100 Continue response.")
            # Wait for 1 second for the server to send a response.
            if urllib3.util.wait_for_read(self.sock, 1):
                self._handle_expect_response(message_body)
                return
            else:
                # From the RFC:
                # Because of the presence of older implementations, the
                # protocol allows ambiguous situations in which a client may
                # send "Expect: 100-continue" without receiving either a 417
                # (Expectation Failed) status or a 100 (Continue) status.
                # Therefore, when a client sends this header field to an origin
                # server (possibly via a proxy) from which it has never seen a
                # 100 (Continue) status, the client SHOULD NOT wait for an
                # indefinite period before sending the request body.
                logger.debug(
                    "No response seen from server, continuing to "
                    "send the response body."
                )
        if message_body is not None:
            # message_body was not a string (i.e. it is a file), and
            # we must run the risk of Nagle.
            self.send(message_body)

    def _consume_headers(self, fp):
        """Read and discard header lines following a 100 Continue status."""
        # Most servers (including S3) will just return
        # the CLRF after the 100 continue response. However,
        # some servers (I've specifically seen this for squid when
        # used as a straight HTTP proxy) will also inject a
        # Connection: keep-alive header. To account for this
        # we'll read until we read '\r\n', and ignore any headers
        # that come immediately after the 100 continue response.
        current = None
        while current != b'\r\n':
            current = fp.readline()

    def _handle_expect_response(self, message_body):
        """Decide whether to send the body based on the early response."""
        # This is called when we sent the request headers containing
        # an Expect: 100-continue header and received a response.
        # We now need to figure out what to do.
        fp = self.sock.makefile('rb', 0)
        try:
            maybe_status_line = fp.readline()
            parts = maybe_status_line.split(None, 2)
            if self._is_100_continue_status(maybe_status_line):
                self._consume_headers(fp)
                logger.debug(
                    "100 Continue response seen, now sending request body."
                )
                self._send_message_body(message_body)
            elif len(parts) == 3 and parts[0].startswith(b'HTTP/'):
                # From the RFC:
                # Requirements for HTTP/1.1 origin servers:
                #
                # - Upon receiving a request which includes an Expect
                #   request-header field with the "100-continue"
                #   expectation, an origin server MUST either respond with
                #   100 (Continue) status and continue to read from the
                #   input stream, or respond with a final status code.
                #
                # So if we don't get a 100 Continue response, then
                # whatever the server has sent back is the final response
                # and don't send the message_body.
                logger.debug(
                    "Received a non 100 Continue response "
                    "from the server, NOT sending request body."
                )
                status_tuple = (
                    parts[0].decode('ascii'),
                    int(parts[1]),
                    parts[2].decode('ascii'),
                )
                # Swap in a response class that replays the status line
                # we just consumed, so the normal response machinery
                # still sees it.
                response_class = functools.partial(
                    AWSHTTPResponse, status_tuple=status_tuple
                )
                self.response_class = response_class
                self._response_received = True
        finally:
            fp.close()

    def _send_message_body(self, message_body):
        # A None body (e.g. already sent inline with the headers) is a no-op.
        if message_body is not None:
            self.send(message_body)

    def send(self, str):
        """Send data unless a final response was already received."""
        # NOTE: the parameter shadows the builtin ``str``; kept as-is for
        # signature compatibility with the base class.
        if self._response_received:
            if not self._send_called:
                # urllib3 2.0 chunks and calls send potentially
                # thousands of times inside `request` unlike the
                # standard library. Only log this once for sanity.
                logger.debug(
                    "send() called, but response already received. "
                    "Not sending data."
                )
            self._send_called = True
            return
        return super().send(str)

    def _is_100_continue_status(self, maybe_status_line):
        parts = maybe_status_line.split(None, 2)
        # Check for HTTP/<version> 100 Continue\r\n or HTTP/<version> 100\r\n
        return (
            len(parts) >= 2
            and parts[0].startswith(b'HTTP/')
            and parts[1] == b'100'
        )
+
+
class AWSHTTPConnection(AWSConnection, HTTPConnection):
    """A plain-HTTP connection with AWSConnection's 100 Continue support mixed in."""
+
+
class AWSHTTPSConnection(AWSConnection, VerifiedHTTPSConnection):
    """A verified HTTPS connection with AWSConnection's 100 Continue support mixed in."""
+
+
class AWSHTTPConnectionPool(HTTPConnectionPool):
    # Override urllib3's connection class so pooled connections get the
    # Expect: 100-continue behavior.
    ConnectionCls = AWSHTTPConnection
+
+
class AWSHTTPSConnectionPool(HTTPSConnectionPool):
    # Override urllib3's connection class so pooled TLS connections get
    # the Expect: 100-continue behavior.
    ConnectionCls = AWSHTTPSConnection
+
+
def prepare_request_dict(
    request_dict, endpoint_url, context=None, user_agent=None
):
    """Prepare a serialized request dict for AWSRequest construction.

    Mutates ``request_dict`` in place: sets the full ``url`` (endpoint +
    path + encoded query string), the ``context`` dict, and optionally
    the ``User-Agent`` header.

    :type request_dict: dict
    :param request_dict: The request dict (created from the
        ``serialize`` module).

    :type endpoint_url: string
    :param endpoint_url: The full endpoint url, which contains at least
        the scheme, the hostname, and optionally any path components.

    :type context: dict
    :param context: Optional request context; defaults to an empty dict.

    :type user_agent: string
    :param user_agent: The user agent to use for this request.
    """
    if user_agent is not None:
        request_dict['headers']['User-Agent'] = user_agent

    url = _urljoin(
        endpoint_url, request_dict['url_path'], request_dict.get('host_prefix')
    )
    query_string = request_dict['query_string']
    if query_string:
        # NOTE: accessed via the module to avoid a circular import with
        # botocore.utils; moving these classes around would be a
        # breaking change.
        encoded = botocore.utils.percent_encode_sequence(query_string)
        separator = '&' if '?' in url else '?'
        url = f'{url}{separator}{encoded}'
    request_dict['url'] = url
    request_dict['context'] = context if context is not None else {}
+
+
def create_request_object(request_dict):
    """Build an AWSRequest from a prepared request dict.

    :type request_dict: dict
    :param request_dict: The request dict (created from the
        ``prepare_request_dict`` method).

    :rtype: ``botocore.awsrequest.AWSRequest``
    :return: An AWSRequest object based on the request_dict.
    """
    request = AWSRequest(
        method=request_dict['method'],
        url=request_dict['url'],
        data=request_dict['body'],
        headers=request_dict['headers'],
        auth_path=request_dict.get('auth_path'),
    )
    # The context is attached after construction; AWSRequest.__init__
    # always starts with an empty context dict.
    request.context = request_dict['context']
    return request
+
+
+def _urljoin(endpoint_url, url_path, host_prefix):
+ p = urlsplit(endpoint_url)
+ # -
+ # scheme - p[0]
+ # netloc - p[1]
+ # path - p[2]
+ # query - p[3]
+ # fragment - p[4]
+ if not url_path or url_path == '/':
+ # If there's no path component, ensure the URL ends with
+ # a '/' for backwards compatibility.
+ if not p[2]:
+ new_path = '/'
+ else:
+ new_path = p[2]
+ elif p[2].endswith('/') and url_path.startswith('/'):
+ new_path = p[2][:-1] + url_path
+ else:
+ new_path = p[2] + url_path
+
+ new_netloc = p[1]
+ if host_prefix is not None:
+ new_netloc = host_prefix + new_netloc
+
+ reconstructed = urlunsplit((p[0], new_netloc, new_path, p[3], p[4]))
+ return reconstructed
+
+
class AWSRequestPreparer:
    """Turns an AWSRequest into an AWSPreparedRequest.

    This mirrors the role of ``requests.models.PreparedRequest`` but is
    pared down to botocore's specific needs:

    * URLs get minimal preparation. Requests performed many validations
      and corrections; botocore either does these elsewhere or
      consistently provides well-formed URLs, so here we only append
      query parameters.
    * Body preparation only covers the documented cases: bytes and
      file-like objects (for content length), plus dicts, which are
      url-encoded because some signers rely on that. Multipart file
      uploads are not supported.
    * Method, auth, and cookies are not prepared at all.
    """

    def prepare(self, original):
        """Produce an AWSPreparedRequest from ``original``."""
        method = original.method
        url = self._prepare_url(original)
        body = self._prepare_body(original)
        headers = self._prepare_headers(original, body)
        return AWSPreparedRequest(
            method, url, headers, body, original.stream_output
        )

    def _prepare_url(self, original):
        """Append ``original.params`` (mapping or sequence) to the URL."""
        url = original.url
        if not original.params:
            return url
        params = original.params
        if isinstance(params, Mapping):
            params = list(params.items())
        encoded = urlencode(params, doseq=True)
        # Use '&' when the URL already carries a query string.
        joiner = '&' if urlparse(url).query else '?'
        return joiner.join((url, encoded))

    def _prepare_headers(self, original, prepared_body=None):
        """Copy headers and ensure body-framing headers are present."""
        headers = HeadersDict(original.headers.items())

        # Respect explicitly supplied framing headers.
        if 'Transfer-Encoding' in headers or 'Content-Length' in headers:
            return headers

        # Bodyless methods never get a Content-Length.
        if original.method in ('GET', 'HEAD', 'OPTIONS'):
            return headers

        length = self._determine_content_length(prepared_body)
        if length is None:
            # Failed to determine content length, using chunked
            # NOTE: This shouldn't ever happen in practice
            logger.debug(
                'Failed to determine length of %s', type(prepared_body)
            )
            headers['Transfer-Encoding'] = 'chunked'
        else:
            headers['Content-Length'] = str(length)
        return headers

    def _to_utf8(self, item):
        """UTF-8 encode both members of a (key, value) pair if they are str."""
        key, value = item
        encoded_key = key.encode('utf-8') if isinstance(key, str) else key
        encoded_value = (
            value.encode('utf-8') if isinstance(value, str) else value
        )
        return encoded_key, encoded_value

    def _prepare_body(self, original):
        """Prepares the given HTTP body data."""
        body = original.data
        if body == b'':
            # An explicit empty byte string means "no body".
            body = None

        if isinstance(body, dict):
            body = urlencode(
                [self._to_utf8(pair) for pair in body.items()], doseq=True
            )

        return body

    def _determine_content_length(self, body):
        # Delegated to utils to keep the length heuristics in one place.
        return botocore.utils.determine_content_length(body)
+
+
class AWSRequest:
    """The elements of an HTTP request.

    Originally inspired by ``requests.models.Request``, boiled down to
    botocore's use cases; effectively a named tuple with a ``prepare``
    step.
    """

    # Hook allowing subclasses to substitute a different preparer.
    _REQUEST_PREPARER_CLS = AWSRequestPreparer

    def __init__(
        self,
        method=None,
        url=None,
        headers=None,
        data=None,
        params=None,
        auth_path=None,
        stream_output=False,
    ):
        self._request_preparer = self._REQUEST_PREPARER_CLS()

        self.method = method
        self.url = url
        self.data = data
        self.params = params if params is not None else {}
        self.auth_path = auth_path
        self.stream_output = stream_output

        # Headers are copied into an HTTPHeaders container rather than
        # stored as the caller's mapping.
        self.headers = HTTPHeaders()
        if headers is not None:
            for key, value in headers.items():
                self.headers[key] = value

        # Open-ended scratch space used while processing the request
        # (e.g. the signing timestamp). Nothing in ``context`` is sent
        # over the wire; it only assists in creating what is sent.
        self.context = {}

    def prepare(self):
        """Constructs a :class:`AWSPreparedRequest `."""
        return self._request_preparer.prepare(self)

    @property
    def body(self):
        # The prepared body, normalized to bytes for str payloads.
        prepared = self.prepare().body
        if isinstance(prepared, str):
            return prepared.encode('utf-8')
        return prepared
+
+
class AWSPreparedRequest:
    """A data class representing a finalized request to be sent over the wire.

    Requests at this stage should be treated as final, and the properties of
    the request should not be modified.

    :ivar method: The HTTP Method
    :ivar url: The full url
    :ivar headers: The HTTP headers to send.
    :ivar body: The HTTP body.
    :ivar stream_output: If the response for this request should be streamed.
    """

    def __init__(self, method, url, headers, body, stream_output):
        self.method = method
        self.url = url
        self.headers = headers
        self.body = body
        self.stream_output = stream_output

    def __repr__(self):
        # BUG FIX: the format string here was empty (''), so applying
        # '%' with four arguments raised "TypeError: not all arguments
        # converted during string formatting" on every repr() call.
        # Restored the intended descriptive format.
        fmt = (
            '<AWSPreparedRequest stream_output=%s, method=%s, url=%s, '
            'headers=%s>'
        )
        return fmt % (self.stream_output, self.method, self.url, self.headers)

    def reset_stream(self):
        """Resets the streaming body to it's initial position.

        If the request contains a streaming body (a streamable file-like object)
        seek to the object's initial position to ensure the entire contents of
        the object is sent. This is a no-op for static bytes-like body types.

        :raises UnseekableStreamError: if the body is a stream that
            cannot be rewound with ``seek(0)``.
        """
        # Trying to reset a stream when there is a no stream will
        # just immediately return. It's not an error, it will produce
        # the same result as if we had actually reset the stream (we'll send
        # the entire body contents again if we need to).
        # Same case if the body is a string/bytes/bytearray type.
        non_seekable_types = (bytes, str, bytearray)
        if self.body is None or isinstance(self.body, non_seekable_types):
            return
        try:
            logger.debug("Rewinding stream: %s", self.body)
            self.body.seek(0)
        except Exception as e:
            logger.debug("Unable to rewind stream: %s", e)
            raise UnseekableStreamError(stream_object=self.body)
+
+
class AWSResponse:
    """A data class representing an HTTP response.

    Originally inspired by ``requests.models.Response`` but reduced to
    botocore's needs; effectively a named tuple with lazy body reading.

    :ivar url: The full url.
    :ivar status_code: The status code of the HTTP response.
    :ivar headers: The HTTP headers received.
    :ivar raw: The raw (streaming) response object.
    """

    def __init__(self, url, status_code, headers, raw):
        self.url = url
        self.status_code = status_code
        self.headers = HeadersDict(headers)
        self.raw = raw
        # Lazily-populated cache for the fully-read body.
        self._content = None

    @property
    def content(self):
        """Content of the response as bytes (read once, then cached)."""
        if self._content is None:
            # NOTE: requests would attempt to call stream and fall back
            # to a custom generator that would call read in a loop, but
            # we don't rely on this behavior
            self._content = b''.join(self.raw.stream()) or b''
        return self._content

    @property
    def text(self):
        """Content of the response decoded to text.

        Uses the encoding advertised in the response headers when
        present, falling back to UTF-8 otherwise.
        """
        encoding = botocore.utils.get_encoding_from_headers(self.headers)
        return self.content.decode(encoding or 'utf-8')
+
+
+class _HeaderKey:
+ def __init__(self, key):
+ self._key = key
+ self._lower = key.lower()
+
+ def __hash__(self):
+ return hash(self._lower)
+
+ def __eq__(self, other):
+ return isinstance(other, _HeaderKey) and self._lower == other._lower
+
+ def __str__(self):
+ return self._key
+
+ def __repr__(self):
+ return repr(self._key)
+
+
class HeadersDict(MutableMapping):
    """A case-insensitive mutable mapping for HTTP headers.

    Keys are wrapped in ``_HeaderKey`` so lookups ignore case while
    iteration yields the original spellings.
    """

    def __init__(self, *args, **kwargs):
        self._dict = {}
        self.update(*args, **kwargs)

    def __setitem__(self, key, value):
        self._dict[_HeaderKey(key)] = value

    def __getitem__(self, key):
        return self._dict[_HeaderKey(key)]

    def __delitem__(self, key):
        del self._dict[_HeaderKey(key)]

    def __iter__(self):
        for wrapped_key in self._dict:
            yield str(wrapped_key)

    def __len__(self):
        return len(self._dict)

    def __repr__(self):
        return repr(self._dict)

    def copy(self):
        """Return a shallow copy with the same (case-preserved) entries."""
        return HeadersDict(self.items())
diff --git a/py311/lib/python3.11/site-packages/botocore/cacert.pem b/py311/lib/python3.11/site-packages/botocore/cacert.pem
new file mode 100644
index 0000000000000000000000000000000000000000..919478ed06ae84199e3afc23c22eec96365886c5
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/cacert.pem
@@ -0,0 +1,4361 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
+# Label: "Visa eCommerce Root"
+# Serial: 25952180776285836048024890241505565794
+# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
+# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
+# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
+-----BEGIN CERTIFICATE-----
+MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
+MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
+bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
+CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
+dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
+cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
+2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
+lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
+ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
+299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
+vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
+dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
+AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
+zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
+LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
+7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
+++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
+398znM/jra6O1I7mT1GvFpLgXPYHDw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
+# Label: "QuoVadis Root CA"
+# Serial: 985026699
+# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
+# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
+# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
+# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
+# Label: "Security Communication Root CA"
+# Serial: 0
+# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
+# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
+# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sonera Class2 CA O=Sonera
+# Subject: CN=Sonera Class2 CA O=Sonera
+# Label: "Sonera Class 2 Root CA"
+# Serial: 29
+# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
+# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
+# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: O=Government Root Certification Authority
+# Subject: O=Government Root Certification Authority
+# Label: "Taiwan GRCA"
+# Serial: 42023070807708724159991140556527066870
+# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
+# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
+# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=Class 2 Primary CA O=Certplus
+# Subject: CN=Class 2 Primary CA O=Certplus
+# Label: "Certplus Class 2 Primary CA"
+# Serial: 177770208045934040241468760488327595043
+# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b
+# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb
+# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
+# Label: "DST Root CA X3"
+# Serial: 91299735575339953335919266965803778155
+# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
+# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
+# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GA CA"
+# Serial: 86718877871133159090080555911823548314
+# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
+# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
+# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center
+# Label: "Deutsche Telekom Root CA 2"
+# Serial: 38
+# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08
+# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf
+# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G2"
+# Serial: 10000012
+# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
+# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
+# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 6047274297262753887
+# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
+# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
+# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
+# Label: "Chambers of Commerce Root - 2008"
+# Serial: 11806822484801597146
+# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
+# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
+# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
+# Label: "Global Chambersign Root - 2008"
+# Serial: 14541511773111788494
+# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
+# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
+# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2011"
+# Serial: 0
+# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
+# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
+# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
+# Subject: O=Trustis Limited OU=Trustis FPS Root CA
+# Label: "Trustis FPS Root CA"
+# Serial: 36053640375399034304724988975563710553
+# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
+# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
+# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
+# Label: "EE Certification Centre Root CA"
+# Serial: 112324828676200291871926431888494945866
+# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
+# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
+# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 14367148294922964480859022125800977897474
+# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
+# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
+# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
+-----BEGIN CERTIFICATE-----
+MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
+FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
+uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
+kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
+ewv4n4Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
+# Label: "Staat der Nederlanden Root CA - G3"
+# Serial: 10003001
+# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
+# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
+# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
+DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
+cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
+IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
+xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
+KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
+9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
+5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
+6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
+Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
+bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
+BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
+INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
+U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
+LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
+Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
+gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
+/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
+0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
+fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
+4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
+1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
+QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
+94B7IWcnMFk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
+# Label: "Staat der Nederlanden EV Root CA"
+# Serial: 10000013
+# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
+# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
+# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
+-----BEGIN CERTIFICATE-----
+MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
+MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
+TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
+b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
+M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
+UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
+Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
+rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
+pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
+j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
+KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
+/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
+cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
+1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
+px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
+MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
+eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
+2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
+v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
+wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
+CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
+vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
+Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
+Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
+eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
+FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
+7uzXLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e.
+# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e.
+# Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5"
+# Serial: 156233699172481
+# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
+# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
+# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
+-----BEGIN CERTIFICATE-----
+MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
+BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
+aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
+QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
+SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
+MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
+VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
+dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
+bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
+IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
+/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
+Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
+4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
+5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
+hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
+AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
+SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
+VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
+URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
+peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
+Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
++qtB4Uu2NQvAmxU=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
+# Label: "Certinomis - Root CA"
+# Serial: 1
+# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f
+# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8
+# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58
+-----BEGIN CERTIFICATE-----
+MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb
+BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz
+MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx
+FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g
+Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2
+fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl
+LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV
+WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF
+TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb
+5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc
+CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri
+wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ
+wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG
+m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4
+F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng
+WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0
+2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF
+AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/
+0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw
+F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS
+g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj
+qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN
+h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/
+ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V
+btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj
+Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ
+8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW
+gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certplus Root CA G1 O=Certplus
+# Subject: CN=Certplus Root CA G1 O=Certplus
+# Label: "Certplus Root CA G1"
+# Serial: 1491911565779898356709731176965615564637713
+# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42
+# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66
+# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA
+MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
+dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa
+MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy
+dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a
+iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt
+6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP
+0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f
+6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE
+EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN
+1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc
+h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT
+mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV
+4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO
+WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud
+DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd
+Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq
+hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh
+66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7
+/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS
+S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j
+2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R
+Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr
+RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy
+6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV
+V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5
+g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl
+++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certplus Root CA G2 O=Certplus
+# Subject: CN=Certplus Root CA G2 O=Certplus
+# Label: "Certplus Root CA G2"
+# Serial: 1492087096131536844209563509228951875861589
+# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31
+# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a
+# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17
+-----BEGIN CERTIFICATE-----
+MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x
+CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
+dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x
+CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs
+dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat
+93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x
+Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P
+AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj
+FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG
+SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch
+p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal
+U5ORGpOucGpnutee5WEaXw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G1 O=OpenTrust
+# Label: "OpenTrust Root CA G1"
+# Serial: 1492036577811947013770400127034825178844775
+# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da
+# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e
+# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4
+-----BEGIN CERTIFICATE-----
+MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA
+MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
+ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw
+MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
+T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b
+wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX
+/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0
+77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP
+uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx
+p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx
+Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2
+TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W
+G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw
+vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY
+EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1
+2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw
+DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E
+PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf
+gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS
+FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0
+V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P
+XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I
+i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t
+TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91
+09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky
+Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ
+AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj
+1oxx
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G2 O=OpenTrust
+# Label: "OpenTrust Root CA G2"
+# Serial: 1492012448042702096986875987676935573415441
+# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb
+# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b
+# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2
+-----BEGIN CERTIFICATE-----
+MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA
+MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w
+ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw
+MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU
+T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh
+/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e
+CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6
+1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE
+FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS
+gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X
+G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy
+YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH
+vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4
+t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/
+gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3
+5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w
+DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz
+Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0
+nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT
+RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT
+wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2
+t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa
+TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2
+o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU
+3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA
+iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f
+WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM
+S1IK
+-----END CERTIFICATE-----
+
+# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust
+# Subject: CN=OpenTrust Root CA G3 O=OpenTrust
+# Label: "OpenTrust Root CA G3"
+# Serial: 1492104908271485653071219941864171170455615
+# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24
+# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6
+# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92
+-----BEGIN CERTIFICATE-----
+MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx
+CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U
+cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow
+QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl
+blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm
+3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d
+oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G
+A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5
+DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK
+BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q
+j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx
+4nxp5V2a+EEfOzmTk51V6s2N8fvB
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
+# Label: "LuxTrust Global Root 2"
+# Serial: 59914338225734147123941058376788110305822489521
+# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
+# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
+# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
+-----BEGIN CERTIFICATE-----
+MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
+BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
+BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
+MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
+LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
+ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
+hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
+EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
+Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
+zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
+96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
+j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
+DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
+8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
+X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
+hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
+KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
+Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
+BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
+BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
+jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
+loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
+qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
+2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
+JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
+zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
+LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
+oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-1"
+# Serial: 15752444095811006489
+# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
+# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
+# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
+IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
+pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
+IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
+A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
+cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
+RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
+seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
+9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
+EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
+hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
+DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
+ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
+/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
+ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
+yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
+L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
+zl/HHk484IkzlQsPpTLWPFp5LBk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor RootCert CA-2"
+# Serial: 2711694510199101698
+# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
+# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
+# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
+-----BEGIN CERTIFICATE-----
+MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
+Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
+MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
+Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
+VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
+dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
+QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
+1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
+2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
+DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
+az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
+3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
+oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
+g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
+mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
+8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
+BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
+nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
+DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
+dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
+MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
+/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
+CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
+ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
+2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
+N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
+Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
+As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
+5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
+1uwJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
+# Label: "TrustCor ECA-1"
+# Serial: 9548242946988625984
+# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
+# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
+# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
+VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
+MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
+cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
+IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
+BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
+IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
+dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
+RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
+3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
+BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
+3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
+owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
+wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
+ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
+BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
+MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
+civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
+AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
+hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
+soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
+WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
+tJ/X5g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
diff --git a/py311/lib/python3.11/site-packages/botocore/client.py b/py311/lib/python3.11/site-packages/botocore/client.py
new file mode 100644
index 0000000000000000000000000000000000000000..87eb7f1d7096130de58d08a6255a3e4c9bb154e0
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/client.py
@@ -0,0 +1,1440 @@
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import logging
+
+from botocore import (
+ UNSIGNED, # noqa: F401
+ waiter,
+ xform_name,
+)
+from botocore.args import ClientArgsCreator
+from botocore.auth import AUTH_TYPE_MAPS, resolve_auth_type
+from botocore.awsrequest import prepare_request_dict
+from botocore.compress import maybe_compress_request
+from botocore.config import Config
+from botocore.context import with_current_context
+from botocore.credentials import RefreshableCredentials
+from botocore.discovery import (
+ EndpointDiscoveryHandler,
+ EndpointDiscoveryManager,
+ block_endpoint_discovery_required_operations,
+)
+from botocore.docs.docstring import ClientMethodDocstring, PaginatorDocstring
+from botocore.exceptions import (
+ ClientError, # noqa: F401
+ DataNotFoundError,
+ InvalidEndpointDiscoveryConfigurationError,
+ OperationNotPageableError,
+ UnknownServiceError,
+ UnknownSignatureVersionError,
+)
+from botocore.history import get_global_history_recorder
+from botocore.hooks import first_non_none_response
+from botocore.httpchecksum import (
+ apply_request_checksum,
+ resolve_checksum_context,
+)
+from botocore.model import ServiceModel
+from botocore.paginate import Paginator
+from botocore.retries import adaptive, standard
+from botocore.useragent import UserAgentString, register_feature_id
+from botocore.utils import (
+ CachedProperty,
+ EventbridgeSignerSetter,
+ S3ArnParamHandler, # noqa: F401
+ S3ControlArnParamHandler, # noqa: F401
+ S3ControlArnParamHandlerv2,
+ S3ControlEndpointSetter, # noqa: F401
+ S3EndpointSetter, # noqa: F401
+ S3ExpressIdentityResolver,
+ S3RegionRedirector, # noqa: F401
+ S3RegionRedirectorv2,
+ ensure_boolean,
+ get_service_module_name,
+)
+
# Module-level logger plus the process-global history recorder (its use is
# not visible in this chunk of the module — see botocore.history).
logger = logging.getLogger(__name__)
history_recorder = get_global_history_recorder()
+
+
+class ClientCreator:
+ """Creates client objects for a service."""
+
    def __init__(
        self,
        loader,
        endpoint_resolver,
        user_agent,
        event_emitter,
        retry_handler_factory,
        retry_config_translator,
        response_parser_factory=None,
        exceptions_factory=None,
        config_store=None,
        user_agent_creator=None,
        auth_token_resolver=None,
    ):
        """Store the collaborators used when building service clients.

        :param loader: Loads service data files ('service-2',
            'endpoint-rule-set-1', 'partitions', '_retry').
        :param endpoint_resolver: Resolves endpoint metadata (available
            endpoints and their signature versions).
        :param user_agent: User agent string; forwarded to ClientArgsCreator.
        :param event_emitter: Hook system used for client lifecycle events
            (e.g. 'choose-service-name', 'creating-client-class.*').
        :param retry_handler_factory: Builds legacy-mode retry handlers.
        :param retry_config_translator: Translates the shared '_retry' data
            into a per-service retry config.
        :param response_parser_factory: Optional; forwarded to
            ClientArgsCreator.
        :param exceptions_factory: Optional; forwarded to ClientArgsCreator.
        :param config_store: Unified config lookup used for settings such as
            'endpoint_discovery_enabled'.
        :param user_agent_creator: Optional; forwarded to ClientArgsCreator.
        :param auth_token_resolver: Presumably resolves bearer auth tokens;
            consumed by _evaluate_client_specific_token (defined outside this
            chunk) — confirm there.
        """
        self._loader = loader
        self._endpoint_resolver = endpoint_resolver
        self._user_agent = user_agent
        self._event_emitter = event_emitter
        self._retry_handler_factory = retry_handler_factory
        self._retry_config_translator = retry_config_translator
        self._response_parser_factory = response_parser_factory
        self._exceptions_factory = exceptions_factory
        # TODO: Migrate things away from scoped_config in favor of the
        # config_store. The config store can pull things from both the scoped
        # config and environment variables (and potentially more in the
        # future).
        self._config_store = config_store
        self._user_agent_creator = user_agent_creator
        self._auth_token_resolver = auth_token_resolver
+
    def create_client(
        self,
        service_name,
        region_name,
        is_secure=True,
        endpoint_url=None,
        verify=None,
        credentials=None,
        scoped_config=None,
        api_version=None,
        client_config=None,
        auth_token=None,
    ):
        """Create a fully wired client instance for *service_name*.

        Loads the service model (plus endpoints ruleset and partition data
        when available), dynamically builds the client class, resolves the
        final client arguments, and registers retry, endpoint-discovery and
        service-specific event handlers before returning the client.
        """
        # Let registered handlers remap the requested service name before
        # any model data is loaded.
        responses = self._event_emitter.emit(
            'choose-service-name', service_name=service_name
        )
        service_name = first_non_none_response(responses, default=service_name)
        service_model = self._load_service_model(service_name, api_version)
        try:
            endpoints_ruleset_data = self._load_service_endpoints_ruleset(
                service_name, api_version
            )
            partition_data = self._loader.load_data('partitions')
        except UnknownServiceError:
            # No endpoints ruleset shipped for this service; legacy endpoint
            # routing is used instead.
            endpoints_ruleset_data = None
            partition_data = None
            logger.info(
                'No endpoints ruleset found for service %s, falling back to '
                'legacy endpoint routing.',
                service_name,
            )

        cls = self._create_client_class(service_name, service_model)
        # 'fips-*' / '*-fips' pseudo regions are rewritten to the plain
        # region name with use_fips_endpoint=True set on the config.
        region_name, client_config = self._normalize_fips_region(
            region_name, client_config
        )
        # Prefer the modeled 'auth' trait when present; otherwise fall back
        # to the legacy top-level 'signatureVersion' metadata.
        if auth := service_model.metadata.get('auth'):
            service_signature_version = resolve_auth_type(auth)
        else:
            service_signature_version = service_model.metadata.get(
                'signatureVersion'
            )
        endpoint_bridge = ClientEndpointBridge(
            self._endpoint_resolver,
            scoped_config,
            client_config,
            service_signing_name=service_model.metadata.get('signingName'),
            config_store=self._config_store,
            service_signature_version=service_signature_version,
        )
        # A token resolved for this specific service overrides the
        # auth_token argument (see _evaluate_client_specific_token, defined
        # outside this chunk — confirm precedence there).
        if token := self._evaluate_client_specific_token(
            service_model.signing_name
        ):
            auth_token = token
        client_args = self._get_client_args(
            service_model,
            region_name,
            is_secure,
            endpoint_url,
            verify,
            credentials,
            scoped_config,
            client_config,
            endpoint_bridge,
            auth_token,
            endpoints_ruleset_data,
            partition_data,
        )
        service_client = cls(**client_args)
        self._register_retries(service_client)
        self._register_s3_events(
            client=service_client,
            endpoint_bridge=None,
            endpoint_url=None,
            client_config=client_config,
            scoped_config=scoped_config,
        )
        self._register_s3express_events(client=service_client)
        self._register_s3_control_events(client=service_client)
        self._register_importexport_events(client=service_client)
        self._register_endpoint_discovery(
            service_client, endpoint_url, client_config
        )
        return service_client
+
+ def create_client_class(self, service_name, api_version=None):
+ service_model = self._load_service_model(service_name, api_version)
+ return self._create_client_class(service_name, service_model)
+
    def _create_client_class(self, service_name, service_model):
        """Dynamically build the client class for *service_model*: one
        method per operation, derived from BaseClient, customizable via the
        'creating-client-class.<service-id>' event."""
        class_attributes = self._create_methods(service_model)
        py_name_to_operation_name = self._create_name_mapping(service_model)
        class_attributes['_PY_TO_OP_NAME'] = py_name_to_operation_name
        bases = [BaseClient]
        service_id = service_model.service_id.hyphenize()
        # Handlers may mutate class_attributes / base_classes in place to
        # customize the generated class before it is created.
        self._event_emitter.emit(
            f'creating-client-class.{service_id}',
            class_attributes=class_attributes,
            base_classes=bases,
        )
        class_name = get_service_module_name(service_model)
        cls = type(str(class_name), tuple(bases), class_attributes)
        return cls
+
+ def _normalize_fips_region(self, region_name, client_config):
+ if region_name is not None:
+ normalized_region_name = region_name.replace('fips-', '').replace(
+ '-fips', ''
+ )
+ # If region has been transformed then set flag
+ if normalized_region_name != region_name:
+ config_use_fips_endpoint = Config(use_fips_endpoint=True)
+ if client_config:
+ # Keeping endpoint setting client specific
+ client_config = client_config.merge(
+ config_use_fips_endpoint
+ )
+ else:
+ client_config = config_use_fips_endpoint
+ logger.warning(
+ 'transforming region from %s to %s and setting '
+ 'use_fips_endpoint to true. client should not '
+ 'be configured with a fips psuedo region.',
+ region_name,
+ normalized_region_name,
+ )
+ region_name = normalized_region_name
+ return region_name, client_config
+
+ def _load_service_model(self, service_name, api_version=None):
+ json_model = self._loader.load_service_model(
+ service_name, 'service-2', api_version=api_version
+ )
+ service_model = ServiceModel(json_model, service_name=service_name)
+ return service_model
+
+ def _load_service_endpoints_ruleset(self, service_name, api_version=None):
+ return self._loader.load_service_model(
+ service_name, 'endpoint-rule-set-1', api_version=api_version
+ )
+
    def _register_retries(self, client):
        """Register the retry handler(s) matching the client's configured
        retry mode, then record the mode as a user-agent feature id."""
        retry_mode = client.meta.config.retries['mode']
        if retry_mode == 'standard':
            self._register_v2_standard_retries(client)
        elif retry_mode == 'adaptive':
            # Adaptive mode is registered on top of the standard handler.
            self._register_v2_standard_retries(client)
            self._register_v2_adaptive_retries(client)
        elif retry_mode == 'legacy':
            self._register_legacy_retries(client)
        else:
            # Unknown mode: register nothing and skip feature-id tracking.
            return
        register_feature_id(f'RETRY_MODE_{retry_mode.upper()}')
+
+ def _register_v2_standard_retries(self, client):
+ max_attempts = client.meta.config.retries.get('total_max_attempts')
+ kwargs = {'client': client}
+ if max_attempts is not None:
+ kwargs['max_attempts'] = max_attempts
+ standard.register_retry_handler(**kwargs)
+
    def _register_v2_adaptive_retries(self, client):
        # Registered in addition to the standard handler when the retry
        # mode is 'adaptive' (see _register_retries).
        adaptive.register_retry_handler(client)
+
    def _register_legacy_retries(self, client):
        """Register the legacy retry handler built from the shared '_retry'
        data file, scoped to this client's service via 'needs-retry.*'."""
        endpoint_prefix = client.meta.service_model.endpoint_prefix
        service_id = client.meta.service_model.service_id
        service_event_name = service_id.hyphenize()

        # First, we load the entire retry config for all services,
        # then pull out just the information we need.
        original_config = self._loader.load_data('_retry')
        if not original_config:
            # No retry data shipped; nothing to register.
            return

        # Translate user-facing retry settings into the legacy handler's
        # expected form (total_max_attempts -> max_attempts).
        retries = self._transform_legacy_retries(client.meta.config.retries)
        retry_config = self._retry_config_translator.build_retry_config(
            endpoint_prefix,
            original_config.get('retry', {}),
            original_config.get('definitions', {}),
            retries,
        )

        logger.debug(
            "Registering retry handlers for service: %s",
            client.meta.service_model.service_name,
        )
        handler = self._retry_handler_factory.create_retry_handler(
            retry_config, endpoint_prefix
        )
        # Per-service unique id for this handler registration.
        unique_id = f'retry-config-{service_event_name}'
        client.meta.events.register(
            f"needs-retry.{service_event_name}", handler, unique_id=unique_id
        )
+
+ def _transform_legacy_retries(self, retries):
+ if retries is None:
+ return
+ copied_args = retries.copy()
+ if 'total_max_attempts' in retries:
+ copied_args = retries.copy()
+ copied_args['max_attempts'] = (
+ copied_args.pop('total_max_attempts') - 1
+ )
+ return copied_args
+
+ def _get_retry_mode(self, client, config_store):
+ client_retries = client.meta.config.retries
+ if (
+ client_retries is not None
+ and client_retries.get('mode') is not None
+ ):
+ return client_retries['mode']
+ return config_store.get_config_variable('retry_mode') or 'legacy'
+
    def _register_endpoint_discovery(self, client, endpoint_url, config):
        """Attach endpoint-discovery handlers when the service models a
        discovery operation; when discovery is disabled, block operations
        that require it instead."""
        if endpoint_url is not None:
            # Don't register any handlers in the case of a custom endpoint url
            return
        # Only attach handlers if the service supports discovery
        if client.meta.service_model.endpoint_discovery_operation is None:
            return
        events = client.meta.events
        service_id = client.meta.service_model.service_id.hyphenize()
        enabled = False
        # Client config takes precedence over the config store.
        if config and config.endpoint_discovery_enabled is not None:
            enabled = config.endpoint_discovery_enabled
        elif self._config_store:
            enabled = self._config_store.get_config_variable(
                'endpoint_discovery_enabled'
            )

        # Normalizes to True / False / 'auto'; raises on anything else.
        enabled = self._normalize_endpoint_discovery_config(enabled)
        if enabled and self._requires_endpoint_discovery(client, enabled):
            # 'auto' discovers only when the operation requires it; an
            # explicit True always discovers.
            discover = enabled is True
            manager = EndpointDiscoveryManager(
                client, always_discover=discover
            )
            handler = EndpointDiscoveryHandler(manager)
            handler.register(events, service_id)
        else:
            # Discovery off: fail fast on operations that need it.
            events.register(
                'before-parameter-build',
                block_endpoint_discovery_required_operations,
            )
+
+ def _normalize_endpoint_discovery_config(self, enabled):
+ """Config must either be a boolean-string or string-literal 'auto'"""
+ if isinstance(enabled, str):
+ enabled = enabled.lower().strip()
+ if enabled == 'auto':
+ return enabled
+ elif enabled in ('true', 'false'):
+ return ensure_boolean(enabled)
+ elif isinstance(enabled, bool):
+ return enabled
+
+ raise InvalidEndpointDiscoveryConfigurationError(config_value=enabled)
+
+ def _requires_endpoint_discovery(self, client, enabled):
+ if enabled == "auto":
+ return client.meta.service_model.endpoint_discovery_required
+ return enabled
+
    def _register_eventbridge_events(
        self, client, endpoint_bridge, endpoint_url
    ):
        # 'events' (EventBridge)-only hook: registers an
        # EventbridgeSignerSetter on the client's event system.
        # NOTE(review): endpoint_bridge is accepted but unused here.
        if client.meta.service_model.service_name != 'events':
            return
        EventbridgeSignerSetter(
            endpoint_resolver=self._endpoint_resolver,
            region=client.meta.region_name,
            endpoint_url=endpoint_url,
        ).register(client.meta.events)
+
    def _register_s3express_events(
        self,
        client,
        endpoint_bridge=None,
        endpoint_url=None,
        client_config=None,
        scoped_config=None,
    ):
        # S3-only hook: registers the S3ExpressIdentityResolver (backed by
        # RefreshableCredentials). The extra keyword parameters are accepted
        # for signature parity with the other _register_* hooks but unused.
        if client.meta.service_model.service_name != 's3':
            return
        S3ExpressIdentityResolver(client, RefreshableCredentials).register()
+
    def _register_s3_events(
        self,
        client,
        endpoint_bridge,
        endpoint_url,
        client_config,
        scoped_config,
    ):
        """S3-only hook: region-redirect handling, presign signature-version
        defaulting, and input-parameter capture."""
        if client.meta.service_model.service_name != 's3':
            return
        S3RegionRedirectorv2(None, client).register()
        self._set_s3_presign_signature_version(
            client.meta, client_config, scoped_config
        )
        # Snapshot selected input params before parameter build runs.
        client.meta.events.register(
            'before-parameter-build.s3', self._inject_s3_input_parameters
        )
+
    def _register_s3_control_events(
        self,
        client,
        endpoint_bridge=None,
        endpoint_url=None,
        client_config=None,
        scoped_config=None,
    ):
        # s3control-only hook: registers ARN parameter handling. The extra
        # keyword parameters are accepted for signature parity with the
        # other _register_* hooks but unused.
        if client.meta.service_model.service_name != 's3control':
            return
        S3ControlArnParamHandlerv2().register(client.meta.events)
+
    def _set_s3_presign_signature_version(
        self, client_meta, client_config, scoped_config
    ):
        """Default S3 presigning to sigv2 ('s3') in regions known to support
        it, unless the customer configured a signature version."""
        # This will return the manually configured signature version, or None
        # if none was manually set. If a customer manually sets the signature
        # version, we always want to use what they set.
        provided_signature_version = _get_configured_signature_version(
            's3', client_config, scoped_config
        )
        if provided_signature_version is not None:
            return

        # Check to see if the region is a region that we know about. If we
        # don't know about a region, then we can safely assume it's a new
        # region that is sigv4 only, since all new S3 regions only allow sigv4.
        # The only exception is aws-global. This is a pseudo-region for the
        # global endpoint, we should respect the signature versions it
        # supports, which includes v2.
        regions = self._endpoint_resolver.get_available_endpoints(
            's3', client_meta.partition
        )
        if (
            client_meta.region_name != 'aws-global'
            and client_meta.region_name not in regions
        ):
            return

        # If it is a region we know about, we want to default to sigv2, so here
        # we check to see if it is available.
        endpoint = self._endpoint_resolver.construct_endpoint(
            's3', client_meta.region_name
        )
        signature_versions = endpoint['signatureVersions']
        if 's3' not in signature_versions:
            return

        # We now know that we're in a known region that supports sigv2 and
        # the customer hasn't set a signature version so we default the
        # signature version to sigv2.
        client_meta.events.register(
            'choose-signer.s3', self._default_s3_presign_to_sigv2
        )
+
+ def _inject_s3_input_parameters(self, params, context, **kwargs):
+ context['input_params'] = {}
+ inject_parameters = ('Bucket', 'Delete', 'Key', 'Prefix')
+ for inject_parameter in inject_parameters:
+ if inject_parameter in params:
+ context['input_params'][inject_parameter] = params[
+ inject_parameter
+ ]
+
+ def _default_s3_presign_to_sigv2(self, signature_version, **kwargs):
+ """
+ Returns the 's3' (sigv2) signer if presigning an s3 request. This is
+ intended to be used to set the default signature version for the signer
+ to sigv2. Situations where an asymmetric signature is required are the
+ exception, for example MRAP needs v4a.
+
+ :type signature_version: str
+ :param signature_version: The current client signature version.
+
+ :type signing_name: str
+ :param signing_name: The signing name of the service.
+
+ :return: 's3' if the request is an s3 presign request, None otherwise
+ """
+ if signature_version.startswith('v4a'):
+ return
+
+ if signature_version.startswith('v4-s3express'):
+ return signature_version
+
+ for suffix in ['-query', '-presign-post']:
+ if signature_version.endswith(suffix):
+ return f's3{suffix}'
+
    def _register_importexport_events(
        self,
        client,
        endpoint_bridge=None,
        endpoint_url=None,
        client_config=None,
        scoped_config=None,
    ):
        # importexport-only hook: may override the signature version (see
        # _set_importexport_signature_version). The extra keyword parameters
        # are accepted for signature parity with the other hooks but unused.
        if client.meta.service_model.service_name != 'importexport':
            return
        self._set_importexport_signature_version(
            client.meta, client_config, scoped_config
        )
+
    def _set_importexport_signature_version(
        self, client_meta, client_config, scoped_config
    ):
        """Force SigV4 for importexport unless the customer configured a
        signature version themselves."""
        # This will return the manually configured signature version, or None
        # if none was manually set. If a customer manually sets the signature
        # version, we always want to use what they set.
        configured_signature_version = _get_configured_signature_version(
            'importexport', client_config, scoped_config
        )
        if configured_signature_version is not None:
            return

        # importexport has a modeled signatureVersion of v2, but we
        # previously switched to v4 via endpoint.json before endpoint rulesets.
        # Override the model's signatureVersion for backwards compatibility.
        client_meta.events.register(
            'choose-signer.importexport', self._default_signer_to_sigv4
        )
+
+ def _default_signer_to_sigv4(self, signature_version, **kwargs):
+ return 'v4'
+
    def _get_client_args(
        self,
        service_model,
        region_name,
        is_secure,
        endpoint_url,
        verify,
        credentials,
        scoped_config,
        client_config,
        endpoint_bridge,
        auth_token,
        endpoints_ruleset_data,
        partition_data,
    ):
        """Build the keyword arguments used to construct a client by
        delegating to a ClientArgsCreator seeded with this creator's
        shared collaborators (emitter, loader, config store, ...)."""
        args_creator = ClientArgsCreator(
            self._event_emitter,
            self._user_agent,
            self._response_parser_factory,
            self._loader,
            self._exceptions_factory,
            config_store=self._config_store,
            user_agent_creator=self._user_agent_creator,
        )
        # All per-client inputs are forwarded unchanged; the creator owns
        # the actual resolution logic.
        return args_creator.get_client_args(
            service_model,
            region_name,
            is_secure,
            endpoint_url,
            verify,
            credentials,
            scoped_config,
            client_config,
            endpoint_bridge,
            auth_token,
            endpoints_ruleset_data,
            partition_data,
        )
+
+ def _create_methods(self, service_model):
+ op_dict = {}
+ for operation_name in service_model.operation_names:
+ py_operation_name = xform_name(operation_name)
+ op_dict[py_operation_name] = self._create_api_method(
+ py_operation_name, operation_name, service_model
+ )
+ return op_dict
+
+ def _create_name_mapping(self, service_model):
+ # py_name -> OperationName, for every operation available
+ # for a service.
+ mapping = {}
+ for operation_name in service_model.operation_names:
+ py_operation_name = xform_name(operation_name)
+ mapping[py_operation_name] = operation_name
+ return mapping
+
    def _create_api_method(
        self, py_operation_name, operation_name, service_model
    ):
        """Build the bound client method for one operation: a keyword-only
        wrapper around BaseClient._make_api_call with a generated docstring."""

        def _api_call(self, *args, **kwargs):
            # We're accepting *args so that we can give a more helpful
            # error message than TypeError: _api_call takes exactly
            # 1 argument.
            if args:
                raise TypeError(
                    f"{py_operation_name}() only accepts keyword arguments."
                )
            # The "self" in this scope is referring to the BaseClient.
            return self._make_api_call(operation_name, kwargs)

        _api_call.__name__ = str(py_operation_name)

        # Add the docstring to the client method
        operation_model = service_model.operation_model(operation_name)
        docstring = ClientMethodDocstring(
            operation_model=operation_model,
            method_name=operation_name,
            event_emitter=self._event_emitter,
            method_description=operation_model.documentation,
            example_prefix=f'response = client.{py_operation_name}',
            include_signature=False,
        )
        _api_call.__doc__ = docstring
        return _api_call
+
+ def _evaluate_client_specific_token(self, signing_name):
+ # Resolves an auth_token for the given signing_name.
+ # Returns None if no resolver is set or if resolution fails.
+ resolver = self._auth_token_resolver
+ if not resolver or not signing_name:
+ return None
+
+ return resolver(signing_name=signing_name)
+
+
class ClientEndpointBridge:
    """Bridges endpoint data and client creation

    This class handles taking out the relevant arguments from the endpoint
    resolver and determining which values to use, taking into account any
    client configuration options and scope configuration options.

    This class also handles determining what, if any, region to use if no
    explicit region setting is provided. For example, Amazon S3 client will
    utilize "us-east-1" by default if no region can be resolved."""

    # URI template expanded when no endpoint data can be resolved at all.
    DEFAULT_ENDPOINT = '{service}.{region}.amazonaws.com'
    # Services whose dualstack opt-in is read from the s3-specific config
    # section rather than the generic endpoint-variant config variable.
    _DUALSTACK_CUSTOMIZED_SERVICES = ['s3', 's3-control']

    def __init__(
        self,
        endpoint_resolver,
        scoped_config=None,
        client_config=None,
        default_endpoint=None,
        service_signing_name=None,
        config_store=None,
        service_signature_version=None,
    ):
        self.service_signing_name = service_signing_name
        self.endpoint_resolver = endpoint_resolver
        self.scoped_config = scoped_config
        self.client_config = client_config
        self.default_endpoint = default_endpoint or self.DEFAULT_ENDPOINT
        self.config_store = config_store
        self.service_signature_version = service_signature_version

    def resolve(
        self, service_name, region_name=None, endpoint_url=None, is_secure=True
    ):
        """Resolve the endpoint, signing values, and signature version for
        a service/region pair, falling back through the 'aws' partition
        and finally an assumed endpoint when resolution fails."""
        region_name = self._check_default_region(service_name, region_name)
        use_dualstack_endpoint = self._resolve_use_dualstack_endpoint(
            service_name
        )
        use_fips_endpoint = self._resolve_endpoint_variant_config_var(
            'use_fips_endpoint'
        )
        resolved = self.endpoint_resolver.construct_endpoint(
            service_name,
            region_name,
            use_dualstack_endpoint=use_dualstack_endpoint,
            use_fips_endpoint=use_fips_endpoint,
        )

        # If we can't resolve the region, we'll attempt to get a global
        # endpoint for non-regionalized services (iam, route53, etc)
        if not resolved:
            # TODO: fallback partition_name should be configurable in the
            # future for users to define as needed.
            resolved = self.endpoint_resolver.construct_endpoint(
                service_name,
                region_name,
                partition_name='aws',
                use_dualstack_endpoint=use_dualstack_endpoint,
                use_fips_endpoint=use_fips_endpoint,
            )

        if resolved:
            return self._create_endpoint(
                resolved, service_name, region_name, endpoint_url, is_secure
            )
        else:
            return self._assume_endpoint(
                service_name, region_name, endpoint_url, is_secure
            )

    def resolver_uses_builtin_data(self):
        # True when the resolver is backed by the bundled endpoints data
        # rather than user-supplied data.
        return self.endpoint_resolver.uses_builtin_data

    def _check_default_region(self, service_name, region_name):
        if region_name is not None:
            return region_name
        # Use the client_config region if no explicit region was provided.
        if self.client_config and self.client_config.region_name is not None:
            return self.client_config.region_name

    def _create_endpoint(
        self, resolved, service_name, region_name, endpoint_url, is_secure
    ):
        """Turn resolved endpoint metadata into the final result dict,
        honoring a user-provided endpoint_url over the resolved hostname."""
        region_name, signing_region = self._pick_region_values(
            resolved, region_name, endpoint_url
        )
        if endpoint_url is None:
            endpoint_url = self._make_url(
                resolved.get('hostname'),
                is_secure,
                resolved.get('protocols', []),
            )
        signature_version = self._resolve_signature_version(
            service_name, resolved
        )
        signing_name = self._resolve_signing_name(service_name, resolved)
        return self._create_result(
            service_name=service_name,
            region_name=region_name,
            signing_region=signing_region,
            signing_name=signing_name,
            endpoint_url=endpoint_url,
            metadata=resolved,
            signature_version=signature_version,
        )

    def _resolve_endpoint_variant_config_var(self, config_var):
        """Look up an endpoint-variant flag (e.g. use_fips_endpoint),
        preferring the client config over the config store; defaults to
        False when neither supplies a value."""
        client_config = self.client_config
        config_val = False

        # Client configuration arg has precedence
        if client_config and getattr(client_config, config_var) is not None:
            return getattr(client_config, config_var)
        elif self.config_store is not None:
            # Check config store
            config_val = self.config_store.get_config_variable(config_var)
        return config_val

    def _resolve_use_dualstack_endpoint(self, service_name):
        # S3/S3 Control have a dedicated config location; fall back to the
        # generic endpoint-variant variable for everything else.
        s3_dualstack_mode = self._is_s3_dualstack_mode(service_name)
        if s3_dualstack_mode is not None:
            return s3_dualstack_mode
        return self._resolve_endpoint_variant_config_var(
            'use_dualstack_endpoint'
        )

    def _is_s3_dualstack_mode(self, service_name):
        # Returns True/False when an s3-specific setting exists, or None
        # when the generic resolution should be used instead.
        if service_name not in self._DUALSTACK_CUSTOMIZED_SERVICES:
            return None
        # TODO: This normalization logic is duplicated from the
        # ClientArgsCreator class. Consolidate everything to
        # ClientArgsCreator. _resolve_signature_version also has similarly
        # duplicated logic.
        client_config = self.client_config
        if (
            client_config is not None
            and client_config.s3 is not None
            and 'use_dualstack_endpoint' in client_config.s3
        ):
            # Client config trumps scoped config.
            return client_config.s3['use_dualstack_endpoint']
        if self.scoped_config is not None:
            enabled = self.scoped_config.get('s3', {}).get(
                'use_dualstack_endpoint'
            )
            if enabled in [True, 'True', 'true']:
                return True

    def _assume_endpoint(
        self, service_name, region_name, endpoint_url, is_secure
    ):
        """Fabricate an endpoint from the default URI template when the
        resolver produced nothing; signing values mirror the region."""
        if endpoint_url is None:
            # Expand the default hostname URI template.
            hostname = self.default_endpoint.format(
                service=service_name, region=region_name
            )
            endpoint_url = self._make_url(
                hostname, is_secure, ['http', 'https']
            )
        logger.debug(
            'Assuming an endpoint for %s, %s: %s',
            service_name,
            region_name,
            endpoint_url,
        )
        # We still want to allow the user to provide an explicit version.
        signature_version = self._resolve_signature_version(
            service_name, {'signatureVersions': ['v4']}
        )
        signing_name = self._resolve_signing_name(service_name, resolved={})
        return self._create_result(
            service_name=service_name,
            region_name=region_name,
            signing_region=region_name,
            signing_name=signing_name,
            signature_version=signature_version,
            endpoint_url=endpoint_url,
            metadata={},
        )

    def _create_result(
        self,
        service_name,
        region_name,
        signing_region,
        signing_name,
        endpoint_url,
        signature_version,
        metadata,
    ):
        # Plain-dict result consumed by client construction code.
        return {
            'service_name': service_name,
            'region_name': region_name,
            'signing_region': signing_region,
            'signing_name': signing_name,
            'endpoint_url': endpoint_url,
            'signature_version': signature_version,
            'metadata': metadata,
        }

    def _make_url(self, hostname, is_secure, supported_protocols):
        # Prefer https whenever the caller asked for it and the endpoint
        # supports it; otherwise fall back to plain http.
        if is_secure and 'https' in supported_protocols:
            scheme = 'https'
        else:
            scheme = 'http'
        return f'{scheme}://{hostname}'

    def _resolve_signing_name(self, service_name, resolved):
        # CredentialScope overrides everything else.
        if (
            'credentialScope' in resolved
            and 'service' in resolved['credentialScope']
        ):
            return resolved['credentialScope']['service']
        # Use the signingName from the model if present.
        if self.service_signing_name:
            return self.service_signing_name
        # Just assume is the same as the service name.
        return service_name

    def _pick_region_values(self, resolved, region_name, endpoint_url):
        signing_region = region_name
        if endpoint_url is None:
            # Do not use the region name or signing name from the resolved
            # endpoint if the user explicitly provides an endpoint_url. This
            # would happen if we resolve to an endpoint where the service has
            # a "defaults" section that overrides all endpoint with a single
            # hostname and credentialScope. This has been the case historically
            # for how STS has worked. The only way to resolve an STS endpoint
            # was to provide a region_name and an endpoint_url. In that case,
            # we would still resolve an endpoint, but we would not use the
            # resolved endpointName or signingRegion because we want to allow
            # custom endpoints.
            region_name = resolved['endpointName']
            signing_region = region_name
            if (
                'credentialScope' in resolved
                and 'region' in resolved['credentialScope']
            ):
                signing_region = resolved['credentialScope']['region']
        return region_name, signing_region

    def _resolve_signature_version(self, service_name, resolved):
        """Pick a signature version: user config first, then s3 special
        cases, then the service model, then endpoints.json data."""
        configured_version = _get_configured_signature_version(
            service_name, self.client_config, self.scoped_config
        )
        if configured_version is not None:
            return configured_version

        # These have since added the "auth" key to the service model
        # with "aws.auth#sigv4", but preserve existing behavior from
        # when we preferred endpoints.json over the service models
        if service_name in ('s3', 's3-control'):
            return 's3v4'

        if self.service_signature_version is not None:
            # Prefer the service model
            potential_versions = [self.service_signature_version]
        else:
            # Fall back to endpoints.json to preserve existing behavior, which
            # may be useful for users who have custom service models
            potential_versions = resolved.get('signatureVersions', [])
            # This was added for the V2 -> V4 transition,
            # for services that added V4 after V2 in endpoints.json
            if 'v4' in potential_versions:
                return 'v4'
        # Now just iterate over the signature versions in order until we
        # find the first one that is known to Botocore.
        for known in potential_versions:
            if known in AUTH_TYPE_MAPS:
                return known

        raise UnknownSignatureVersionError(
            signature_version=potential_versions
        )
+
+
class BaseClient:
    """Base class for all generated service clients.

    Concrete clients are created at runtime by the client creator, which
    subclasses this and attaches one method per modeled operation.
    """

    # This is actually reassigned with the py->op_name mapping
    # when the client creator creates the subclass. This value is used
    # because calls such as client.get_paginator('list_objects') use the
    # snake_case name, but we need to know the ListObjects form.
    # xform_name() does the ListObjects->list_objects conversion, but
    # we need the reverse mapping here.
    _PY_TO_OP_NAME = {}

    def __init__(
        self,
        serializer,
        endpoint,
        response_parser,
        event_emitter,
        request_signer,
        service_model,
        loader,
        client_config,
        partition,
        exceptions_factory,
        endpoint_ruleset_resolver=None,
        user_agent_creator=None,
    ):
        self._serializer = serializer
        self._endpoint = endpoint
        self._ruleset_resolver = endpoint_ruleset_resolver
        self._response_parser = response_parser
        self._request_signer = request_signer
        # Lazily-populated cache for paginator/waiter configuration.
        self._cache = {}
        self._loader = loader
        self._client_config = client_config
        self.meta = ClientMeta(
            event_emitter,
            self._client_config,
            endpoint.host,
            service_model,
            self._PY_TO_OP_NAME,
            partition,
        )
        self._exceptions_factory = exceptions_factory
        self._exceptions = None
        self._user_agent_creator = user_agent_creator
        if self._user_agent_creator is None:
            # Fall back to an environment-derived user agent when the
            # creator didn't supply one.
            self._user_agent_creator = (
                UserAgentString.from_environment().with_client_config(
                    self._client_config
                )
            )
        self._register_handlers()

    def __getattr__(self, item):
        # Give event handlers a chance to supply dynamically-added
        # attributes before raising AttributeError.
        service_id = self._service_model.service_id.hyphenize()
        event_name = f'getattr.{service_id}.{item}'

        handler, event_response = self.meta.events.emit_until_response(
            event_name, client=self
        )

        if event_response is not None:
            return event_response

        raise AttributeError(
            f"'{self.__class__.__name__}' object has no attribute '{item}'"
        )

    def close(self):
        """Closes underlying endpoint connections."""
        self._endpoint.close()

    def _register_handlers(self):
        # Register the handler required to sign requests.
        service_id = self.meta.service_model.service_id.hyphenize()
        self.meta.events.register(
            f"request-created.{service_id}", self._request_signer.handler
        )
        # Rebuild user agent string right before request is sent
        # to ensure all registered features are included.
        self.meta.events.register_last(
            f"request-created.{service_id}",
            self._user_agent_creator.rebuild_and_replace_user_agent_handler,
        )

    @property
    def _service_model(self):
        return self.meta.service_model

    @with_current_context()
    def _make_api_call(self, operation_name, api_params):
        """Execute one API operation end to end: emit parameter events,
        resolve the endpoint, serialize, send, parse, and raise a modeled
        exception for non-2xx responses."""
        operation_model = self._service_model.operation_model(operation_name)
        service_name = self._service_model.service_name
        history_recorder.record(
            'API_CALL',
            {
                'service': service_name,
                'operation': operation_name,
                'params': api_params,
            },
        )
        if operation_model.deprecated:
            logger.debug(
                'Warning: %s.%s() is deprecated', service_name, operation_name
            )
        request_context = {
            'client_region': self.meta.region_name,
            'client_config': self.meta.config,
            'has_streaming_input': operation_model.has_streaming_input,
            'auth_type': operation_model.resolved_auth_type,
            'unsigned_payload': operation_model.unsigned_payload,
            'auth_options': self._service_model.metadata.get('auth'),
        }

        # Handlers may rewrite the user-supplied parameters first.
        api_params = self._emit_api_params(
            api_params=api_params,
            operation_model=operation_model,
            context=request_context,
        )
        (
            endpoint_url,
            additional_headers,
            properties,
        ) = self._resolve_endpoint_ruleset(
            operation_model, api_params, request_context
        )
        if properties:
            # Pass arbitrary endpoint info with the Request
            # for use during construction.
            request_context['endpoint_properties'] = properties
        request_dict = self._convert_to_request_dict(
            api_params=api_params,
            operation_model=operation_model,
            endpoint_url=endpoint_url,
            context=request_context,
            headers=additional_headers,
        )
        resolve_checksum_context(request_dict, operation_model, api_params)

        service_id = self._service_model.service_id.hyphenize()
        # A 'before-call' handler may short-circuit the HTTP round trip by
        # returning a (http, parsed) pair.
        handler, event_response = self.meta.events.emit_until_response(
            f'before-call.{service_id}.{operation_name}',
            model=operation_model,
            params=request_dict,
            request_signer=self._request_signer,
            context=request_context,
        )

        if event_response is not None:
            http, parsed_response = event_response
        else:
            maybe_compress_request(
                self.meta.config, request_dict, operation_model
            )
            apply_request_checksum(request_dict)
            http, parsed_response = self._make_request(
                operation_model, request_dict, request_context
            )

        self.meta.events.emit(
            f'after-call.{service_id}.{operation_name}',
            http_response=http,
            parsed=parsed_response,
            model=operation_model,
            context=request_context,
        )

        if http.status_code >= 300:
            error_info = parsed_response.get("Error", {})
            # Handlers can override the error code used for exception
            # class lookup (e.g. to normalize service quirks).
            error_code = request_context.get(
                'error_code_override'
            ) or error_info.get("Code")
            error_class = self.exceptions.from_code(error_code)
            raise error_class(parsed_response, operation_name)
        else:
            return parsed_response

    def _make_request(self, operation_model, request_dict, request_context):
        """Send the serialized request, emitting 'after-call-error' and
        re-raising when the transport layer fails."""
        try:
            return self._endpoint.make_request(operation_model, request_dict)
        except Exception as e:
            self.meta.events.emit(
                f'after-call-error.{self._service_model.service_id.hyphenize()}.{operation_model.name}',
                exception=e,
                context=request_context,
            )
            raise

    def _convert_to_request_dict(
        self,
        api_params,
        operation_model,
        endpoint_url,
        context=None,
        headers=None,
        set_user_agent_header=True,
    ):
        """Serialize api_params into the request dict consumed by the
        endpoint, applying host-prefix, header, and user-agent policy."""
        request_dict = self._serializer.serialize_to_request(
            api_params, operation_model
        )
        if not self._client_config.inject_host_prefix:
            request_dict.pop('host_prefix', None)
        if headers is not None:
            request_dict['headers'].update(headers)
        if set_user_agent_header:
            user_agent = self._user_agent_creator.to_string()
        else:
            user_agent = None
        prepare_request_dict(
            request_dict,
            endpoint_url=endpoint_url,
            user_agent=user_agent,
            context=context,
        )
        return request_dict

    def _emit_api_params(self, api_params, operation_model, context):
        # Given the API params provided by the user and the operation_model
        # we can serialize the request to a request_dict.
        operation_name = operation_model.name

        # Emit an event that allows users to modify the parameters at the
        # beginning of the method. It allows handlers to modify existing
        # parameters or return a new set of parameters to use.
        service_id = self._service_model.service_id.hyphenize()
        responses = self.meta.events.emit(
            f'provide-client-params.{service_id}.{operation_name}',
            params=api_params,
            model=operation_model,
            context=context,
        )
        api_params = first_non_none_response(responses, default=api_params)

        self.meta.events.emit(
            f'before-parameter-build.{service_id}.{operation_name}',
            params=api_params,
            model=operation_model,
            context=context,
        )
        return api_params

    def _resolve_endpoint_ruleset(
        self,
        operation_model,
        params,
        request_context,
        ignore_signing_region=False,
    ):
        """Returns endpoint URL and list of additional headers returned from
        EndpointRulesetResolver for the given operation and params. If the
        ruleset resolver is not available, for example because the service has
        no endpoints ruleset file, the legacy endpoint resolver's value is
        returned.

        Use ignore_signing_region for generating presigned URLs or any other
        situation where the signing region information from the ruleset
        resolver should be ignored.

        Returns tuple of URL and headers dictionary. Additionally, the
        request_context dict is modified in place with any signing information
        returned from the ruleset resolver.
        """
        if self._ruleset_resolver is None:
            endpoint_url = self.meta.endpoint_url
            additional_headers = {}
            endpoint_properties = {}
        else:
            endpoint_info = self._ruleset_resolver.construct_endpoint(
                operation_model=operation_model,
                call_args=params,
                request_context=request_context,
            )
            endpoint_url = endpoint_info.url
            additional_headers = endpoint_info.headers
            endpoint_properties = endpoint_info.properties
            # If authSchemes is present, overwrite default auth type and
            # signing context derived from service model.
            auth_schemes = endpoint_info.properties.get('authSchemes')
            if auth_schemes is not None:
                auth_info = self._ruleset_resolver.auth_schemes_to_signing_ctx(
                    auth_schemes
                )
                auth_type, signing_context = auth_info
                request_context['auth_type'] = auth_type
                if 'region' in signing_context and ignore_signing_region:
                    del signing_context['region']
                if 'signing' in request_context:
                    request_context['signing'].update(signing_context)
                else:
                    request_context['signing'] = signing_context

        return endpoint_url, additional_headers, endpoint_properties

    def get_paginator(self, operation_name):
        """Create a paginator for an operation.

        :type operation_name: string
        :param operation_name: The operation name.  This is the same name
            as the method name on the client.  For example, if the
            method name is ``create_foo``, and you'd normally invoke the
            operation as ``client.create_foo(**kwargs)``, if the
            ``create_foo`` operation can be paginated, you can use the
            call ``client.get_paginator("create_foo")``.

        :raise OperationNotPageableError: Raised if the operation is not
            pageable.  You can use the ``client.can_paginate`` method to
            check if an operation is pageable.

        :rtype: ``botocore.paginate.Paginator``
        :return: A paginator object.

        """
        if not self.can_paginate(operation_name):
            raise OperationNotPageableError(operation_name=operation_name)
        else:
            actual_operation_name = self._PY_TO_OP_NAME[operation_name]

            # Create a new paginate method that will serve as a proxy to
            # the underlying Paginator.paginate method. This is needed to
            # attach a docstring to the method.
            def paginate(self, **kwargs):
                return Paginator.paginate(self, **kwargs)

            paginator_config = self._cache['page_config'][
                actual_operation_name
            ]
            # Add the docstring for the paginate method.
            paginate.__doc__ = PaginatorDocstring(
                paginator_name=actual_operation_name,
                event_emitter=self.meta.events,
                service_model=self.meta.service_model,
                paginator_config=paginator_config,
                include_signature=False,
            )

            # Rename the paginator class based on the type of paginator.
            service_module_name = get_service_module_name(
                self.meta.service_model
            )
            paginator_class_name = (
                f"{service_module_name}.Paginator.{actual_operation_name}"
            )

            # Create the new paginator class
            documented_paginator_cls = type(
                paginator_class_name, (Paginator,), {'paginate': paginate}
            )

            operation_model = self._service_model.operation_model(
                actual_operation_name
            )
            paginator = documented_paginator_cls(
                getattr(self, operation_name),
                paginator_config,
                operation_model,
            )
            return paginator

    def can_paginate(self, operation_name):
        """Check if an operation can be paginated.

        :type operation_name: string
        :param operation_name: The operation name.  This is the same name
            as the method name on the client.  For example, if the
            method name is ``create_foo``, and you'd normally invoke the
            operation as ``client.create_foo(**kwargs)``, if the
            ``create_foo`` operation can be paginated, you can use the
            call ``client.get_paginator("create_foo")``.

        :return: ``True`` if the operation can be paginated,
            ``False`` otherwise.

        """
        # Paginator config is loaded once per client and cached; a missing
        # paginators file means no operation is pageable.
        if 'page_config' not in self._cache:
            try:
                page_config = self._loader.load_service_model(
                    self._service_model.service_name,
                    'paginators-1',
                    self._service_model.api_version,
                )['pagination']
                self._cache['page_config'] = page_config
            except DataNotFoundError:
                self._cache['page_config'] = {}
        actual_operation_name = self._PY_TO_OP_NAME[operation_name]
        return actual_operation_name in self._cache['page_config']

    def _get_waiter_config(self):
        # Waiter config is loaded once per client and cached; a missing
        # waiters file yields an empty dict.
        if 'waiter_config' not in self._cache:
            try:
                waiter_config = self._loader.load_service_model(
                    self._service_model.service_name,
                    'waiters-2',
                    self._service_model.api_version,
                )
                self._cache['waiter_config'] = waiter_config
            except DataNotFoundError:
                self._cache['waiter_config'] = {}
        return self._cache['waiter_config']

    def get_waiter(self, waiter_name):
        """Returns an object that can wait for some condition.

        :type waiter_name: str
        :param waiter_name: The name of the waiter to get. See the waiters
            section of the service docs for a list of available waiters.

        :returns: The specified waiter object.
        :rtype: ``botocore.waiter.Waiter``
        """
        config = self._get_waiter_config()
        if not config:
            raise ValueError(f"Waiter does not exist: {waiter_name}")
        model = waiter.WaiterModel(config)
        mapping = {}
        for name in model.waiter_names:
            mapping[xform_name(name)] = name
        if waiter_name not in mapping:
            raise ValueError(f"Waiter does not exist: {waiter_name}")

        return waiter.create_waiter_with_client(
            mapping[waiter_name], model, self
        )

    @CachedProperty
    def waiter_names(self):
        """Returns a list of all available waiters."""
        config = self._get_waiter_config()
        if not config:
            return []
        model = waiter.WaiterModel(config)
        # Waiter configs is a dict, we just want the waiter names
        # which are the keys in the dict.
        return [xform_name(name) for name in model.waiter_names]

    @property
    def exceptions(self):
        # Modeled exception classes, created lazily on first access.
        if self._exceptions is None:
            self._exceptions = self._load_exceptions()
        return self._exceptions

    def _load_exceptions(self):
        return self._exceptions_factory.create_client_exceptions(
            self._service_model
        )

    def _get_credentials(self):
        """
        This private interface is subject to abrupt breaking changes, including
        removal, in any botocore release.
        """
        return self._request_signer._credentials
+
+
class ClientMeta:
    """Holds additional client methods.

    This class holds additional information for clients.  It exists for
    two reasons:

        * To give advanced functionality to clients
        * To namespace additional client attributes from the operation
          names which are mapped to methods at runtime.  This avoids
          ever running into collisions with operation names.

    """

    def __init__(
        self,
        events,
        client_config,
        endpoint_url,
        service_model,
        method_to_api_mapping,
        partition,
    ):
        # The event emitter is the one public attribute; everything else
        # is exposed read-only through the properties below.
        self.events = events
        self._client_config = client_config
        self._endpoint_url = endpoint_url
        self._service_model = service_model
        self._method_to_api_mapping = method_to_api_mapping
        self._partition = partition

    @property
    def service_model(self):
        return self._service_model

    @property
    def region_name(self):
        return self._client_config.region_name

    @property
    def endpoint_url(self):
        return self._endpoint_url

    @property
    def config(self):
        return self._client_config

    @property
    def method_to_api_mapping(self):
        # snake_case method name -> modeled OperationName mapping.
        return self._method_to_api_mapping

    @property
    def partition(self):
        return self._partition
+
+
+def _get_configured_signature_version(
+ service_name, client_config, scoped_config
+):
+ """
+ Gets the manually configured signature version.
+
+ :returns: the customer configured signature version, or None if no
+ signature version was configured.
+ """
+ # Client config overrides everything.
+ if client_config and client_config.signature_version is not None:
+ return client_config.signature_version
+
+ # Scoped config overrides picking from the endpoint metadata.
+ if scoped_config is not None:
+ # A given service may have service specific configuration in the
+ # config file, so we need to check there as well.
+ service_config = scoped_config.get(service_name)
+ if service_config is not None and isinstance(service_config, dict):
+ version = service_config.get('signature_version')
+ if version:
+ logger.debug(
+ "Switching signature version for service %s "
+ "to version %s based on config file override.",
+ service_name,
+ version,
+ )
+ return version
+ return None
diff --git a/py311/lib/python3.11/site-packages/botocore/compat.py b/py311/lib/python3.11/site-packages/botocore/compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..62a265c4c6c80a142c1950abade84f55ba5927d1
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/compat.py
@@ -0,0 +1,371 @@
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import copy
+import datetime
+import sys
+import inspect
+import warnings
+import hashlib
+from http.client import HTTPMessage
+import logging
+import shlex
+import re
+import os
+from collections import OrderedDict
+from collections.abc import MutableMapping
+from math import floor
+
+from botocore.vendored import six
+from botocore.exceptions import MD5UnavailableError
+from dateutil.tz import tzlocal
+from urllib3 import exceptions
+
+logger = logging.getLogger(__name__)
+
+
class HTTPHeaders(HTTPMessage):
    # Thin compat subclass of http.client.HTTPMessage; the from_dict /
    # from_pairs alternate constructors are attached to it later in this
    # module.
    pass
+
+from urllib.parse import (
+ quote,
+ urlencode,
+ unquote,
+ unquote_plus,
+ urlparse,
+ urlsplit,
+ urlunsplit,
+ urljoin,
+ parse_qsl,
+ parse_qs,
+)
+from http.client import HTTPResponse
+from io import IOBase as _IOBase
+from base64 import encodebytes
+from email.utils import formatdate
+from itertools import zip_longest
file_type = _IOBase  # compat alias: base class of all io streams ("file-like")
zip = zip  # compat alias: re-exported builtin so botocore.compat.zip resolves

# In python3, unquote takes a str() object, url decodes it,
# then takes the bytestring and decodes it to utf-8.
unquote_str = unquote_plus
+
def set_socket_timeout(http_response, timeout):
    """Set the timeout of the socket from an HTTPResponse.

    :param http_response: An instance of ``httplib.HTTPResponse``
    :param timeout: Timeout value accepted by ``socket.settimeout``.

    """
    # NOTE(review): reaches through private attributes down to the raw
    # socket; assumes the urllib3/http.client internal layout
    # (_fp.fp.raw._sock) — verify against the pinned urllib3 version.
    http_response._fp.fp.raw._sock.settimeout(timeout)
+
def accepts_kwargs(func):
    """Return a truthy value if *func* accepts arbitrary keyword arguments.

    :param func: The callable to inspect.
    :returns: The name of the ``**kwargs`` parameter (truthy) if present,
        otherwise ``None``.
    """
    # Use the named ``varkw`` field instead of the magic index [2]; they
    # are the same slot of the FullArgSpec named tuple, but the attribute
    # access is self-documenting.
    return inspect.getfullargspec(func).varkw
+
def ensure_unicode(s, encoding=None, errors=None):
    """Return *s* unchanged; kept for API compatibility.

    On Python 3 every ``str`` is already unicode, so ``encoding`` and
    ``errors`` are ignored.
    """
    # NOOP in Python 3, because every string is already unicode
    return s
+
def ensure_bytes(s, encoding='utf-8', errors='strict'):
    """Return *s* as ``bytes``, encoding ``str`` input with *encoding*.

    :raises ValueError: if *s* is neither ``str`` nor ``bytes``.
    """
    if isinstance(s, bytes):
        return s
    if isinstance(s, str):
        return s.encode(encoding, errors)
    raise ValueError(f"Expected str or bytes, received {type(s)}.")
+
+
import xml.etree.ElementTree as ETree
# Compat alias for the XML parse error raised by ElementTree.
XMLParseError = ETree.ParseError

import json
+
+
def filter_ssl_warnings():
    """Silence urllib3's InsecurePlatformWarning about missing SSLContext."""
    # Ignore warnings related to SNI as it is not being used in validations.
    warnings.filterwarnings(
        'ignore',
        message="A true SSLContext object is not available.*",
        category=exceptions.InsecurePlatformWarning,
        module=r".*urllib3\.util\.ssl_",
    )
+
+
@classmethod
def from_dict(cls, d):
    """Alternate constructor: build an instance from mapping *d*.

    Each key/value pair is assigned via ``__setitem__`` so the target
    class's item semantics apply.
    """
    instance = cls()
    for key in d:
        instance[key] = d[key]
    return instance
+
+
@classmethod
def from_pairs(cls, pairs):
    """Alternate constructor: build an instance from (key, value) pairs."""
    instance = cls()
    for pair in pairs:
        key, value = pair
        instance[key] = value
    return instance
+
+
# Attach the alternate constructors defined above onto HTTPHeaders.
HTTPHeaders.from_dict = from_dict
HTTPHeaders.from_pairs = from_pairs
+
+
def copy_kwargs(kwargs):
    """
    Return a shallow copy of *kwargs*.

    This used to be a compat shim for 2.6 but is now just an alias.

    :param kwargs: The mapping to copy.
    :returns: A new shallow copy (contained values are shared).
    """
    # Return directly instead of binding to a local that shadowed the
    # function's own name.
    return copy.copy(kwargs)
+
+
def total_seconds(delta):
    """
    Returns the total seconds in a ``datetime.timedelta``.

    This used to be a compat shim for 2.6 but is now just an alias.

    :param delta: The timedelta object
    :type delta: ``datetime.timedelta``
    :returns: The duration as a float number of seconds.
    """
    return delta.total_seconds()
+
+
# Checks to see if md5 is available on this system. A given system might not
# have access to it for various reasons, such as FIPS mode being enabled.
try:
    # NOTE(review): presumably AttributeError covers interpreters lacking
    # md5 entirely and ValueError covers blocked digests (e.g. FIPS builds);
    # confirm against the supported Python versions.
    hashlib.md5(usedforsecurity=False)
    MD5_AVAILABLE = True
except (AttributeError, ValueError):
    MD5_AVAILABLE = False
+
+
def get_md5(*args, **kwargs):
    """Return an md5 hashing object, when the platform provides one.

    :param args: Positional arguments forwarded to ``hashlib.md5``.
    :param kwargs: Keyword arguments forwarded to ``hashlib.md5``.
    :raises MD5UnavailableError: when md5 is unusable on this system
        (see ``MD5_AVAILABLE``).
    """
    if not MD5_AVAILABLE:
        raise MD5UnavailableError()
    return hashlib.md5(*args, **kwargs)
+
+
def compat_shell_split(s, platform=None):
    """Split command string *s* using the rules of *platform*.

    :param s: The command string to split.
    :param platform: Platform identifier; defaults to ``sys.platform``.
    :returns: A list of command components.
    """
    target = sys.platform if platform is None else platform
    if target != "win32":
        return shlex.split(s)
    return _windows_shell_split(s)
+
+
+def _windows_shell_split(s):
+ """Splits up a windows command as the built-in command parser would.
+
+ Windows has potentially bizarre rules depending on where you look. When
+ spawning a process via the Windows C runtime (which is what python does
+ when you call popen) the rules are as follows:
+
+ https://docs.microsoft.com/en-us/cpp/cpp/parsing-cpp-command-line-arguments
+
+ To summarize:
+
+ * Only space and tab are valid delimiters
+ * Double quotes are the only valid quotes
+ * Backslash is interpreted literally unless it is part of a chain that
+ leads up to a double quote. Then the backslashes escape the backslashes,
+ and if there is an odd number the final backslash escapes the quote.
+
+ :param s: The command string to split up into parts.
+ :return: A list of command components.
+ """
+ if not s:
+ return []
+
+ components = []
+ buff = []
+ is_quoted = False
+ num_backslashes = 0
+ for character in s:
+ if character == '\\':
+ # We can't simply append backslashes because we don't know if
+ # they are being used as escape characters or not. Instead we
+ # keep track of how many we've encountered and handle them when
+ # we encounter a different character.
+ num_backslashes += 1
+ elif character == '"':
+ if num_backslashes > 0:
+ # The backslashes are in a chain leading up to a double
+ # quote, so they are escaping each other.
+ buff.append('\\' * int(floor(num_backslashes / 2)))
+ remainder = num_backslashes % 2
+ num_backslashes = 0
+ if remainder == 1:
+ # The number of backslashes is uneven, so they are also
+ # escaping the double quote, so it needs to be added to
+ # the current component buffer.
+ buff.append('"')
+ continue
+
+ # We've encountered a double quote that is not escaped,
+ # so we toggle is_quoted.
+ is_quoted = not is_quoted
+
+ # If there are quotes, then we may want an empty string. To be
+ # safe, we add an empty string to the buffer so that we make
+ # sure it sticks around if there's nothing else between quotes.
+ # If there is other stuff between quotes, the empty string will
+ # disappear during the joining process.
+ buff.append('')
+ elif character in [' ', '\t'] and not is_quoted:
+ # Since the backslashes aren't leading up to a quote, we put in
+ # the exact number of backslashes.
+ if num_backslashes > 0:
+ buff.append('\\' * num_backslashes)
+ num_backslashes = 0
+
+ # Excess whitespace is ignored, so only add the components list
+ # if there is anything in the buffer.
+ if buff:
+ components.append(''.join(buff))
+ buff = []
+ else:
+ # Since the backslashes aren't leading up to a quote, we put in
+ # the exact number of backslashes.
+ if num_backslashes > 0:
+ buff.append('\\' * num_backslashes)
+ num_backslashes = 0
+ buff.append(character)
+
+ # Quotes must be terminated.
+ if is_quoted:
+ raise ValueError(f"No closing quotation in string: {s}")
+
+ # There may be some leftover backslashes, so we need to add them in.
+ # There's no quote so we add the exact number.
+ if num_backslashes > 0:
+ buff.append('\\' * num_backslashes)
+
+ # Add the final component in if there is anything in the buffer.
+ if buff:
+ components.append(''.join(buff))
+
+ return components
+
+
def get_tzinfo_options():
    """Return the tzinfo factories to try, in order of preference.

    Due to dateutil/dateutil#197, Windows may fail to parse times in the
    past with the system clock; tzwinlocal is offered as a fallback there
    because it reads time info from the Windows registry.
    """
    if sys.platform != 'win32':
        return (tzlocal,)
    from dateutil.tz import tzwinlocal

    return (tzlocal, tzwinlocal)
+
+
# Detect if CRT is available for use
try:
    import awscrt.auth

    # Allow user opt-out if needed
    disabled = os.environ.get('BOTO_DISABLE_CRT', "false")
    HAS_CRT = not disabled.lower() == 'true'
except ImportError:
    # HAS_CRT is False when awscrt is missing or BOTO_DISABLE_CRT=true.
    HAS_CRT = False
+
+
def has_minimum_crt_version(minimum_version):
    """Not intended for use outside botocore.

    :param minimum_version: Tuple of ints, e.g. ``(0, 28, 4)``.
    :returns: True when the CRT is present and at least that version;
        False when absent or its version string is unparseable.
    """
    if not HAS_CRT:
        return False

    try:
        parsed = tuple(
            int(part) for part in awscrt.__version__.split(".")
        )
    except (TypeError, ValueError):
        # Non-numeric or malformed version strings are treated as "too old".
        return False

    return parsed >= minimum_version
+
+
def get_current_datetime(remove_tzinfo=True):
    """Return the current UTC time.

    :param remove_tzinfo: When True (the default), strip tzinfo so the
        result is a naive UTC datetime; otherwise return an aware one.
    """
    now = datetime.datetime.now(datetime.timezone.utc)
    if not remove_tzinfo:
        return now
    return now.replace(tzinfo=None)
+
+
########################################################
# urllib3 compat backports #
########################################################

# Vendoring IPv6 validation regex patterns from urllib3
# https://github.com/urllib3/urllib3/blob/7e856c0/src/urllib3/util/url.py
IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
IPV4_RE = re.compile("^" + IPV4_PAT + "$")
HEX_PAT = "[0-9A-Fa-f]{1,4}"
LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
# Each entry below is one alternative of the RFC 3986 IPv6address grammar;
# the %(hex)s / %(ls32)s placeholders are filled from _subs.
_variations = [
    # 6( h16 ":" ) ls32
    "(?:%(hex)s:){6}%(ls32)s",
    # "::" 5( h16 ":" ) ls32
    "::(?:%(hex)s:){5}%(ls32)s",
    # [ h16 ] "::" 4( h16 ":" ) ls32
    "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
    # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
    "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
    # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
    "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
    # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
    "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
    # [ *4( h16 ":" ) h16 ] "::" ls32
    "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
    # [ *5( h16 ":" ) h16 ] "::" h16
    "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
    # [ *6( h16 ":" ) h16 ] "::"
    "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]

UNRESERVED_PAT = (
    r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
)
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
# Zone ID (e.g. "%eth0") that may trail a scoped IPv6 literal.
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")

# These are the characters that are stripped by post-bpo-43882 urlparse().
UNSAFE_URL_CHARS = frozenset('\t\r\n')
+
# Detect if gzip is available for use
try:
    import gzip
    HAS_GZIP = True
except ImportError:
    # gzip is stdlib, but can be absent when CPython was built without zlib.
    HAS_GZIP = False

# Conditional import for awscrt EC crypto functionality
if HAS_CRT and has_minimum_crt_version((0, 28, 4)):
    from awscrt.crypto import EC
else:
    # EC is None when the CRT is absent or too old; callers must check.
    EC = None
diff --git a/py311/lib/python3.11/site-packages/botocore/compress.py b/py311/lib/python3.11/site-packages/botocore/compress.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3dac6f0fc2f5494001b09299922a089849a1fdf
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/compress.py
@@ -0,0 +1,128 @@
+# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""
+NOTE: All functions in this module are considered private and are
+subject to abrupt breaking changes. Please do not use them directly.
+
+"""
+
+import io
+import logging
+from gzip import GzipFile
+from gzip import compress as gzip_compress
+
+from botocore.compat import urlencode
+from botocore.useragent import register_feature_id
+from botocore.utils import determine_content_length
+
+logger = logging.getLogger(__name__)
+
+
def maybe_compress_request(config, request_dict, operation_model):
    """Attempt to compress the request body using the modeled encodings.

    Uses the first modeled encoding we have an encoder for; unsupported
    encodings are logged and skipped.
    """
    if not _should_compress_request(config, request_dict, operation_model):
        return
    for encoding in operation_model.request_compression['encodings']:
        encoder = COMPRESSION_MAPPING.get(encoding)
        if encoder is None:
            logger.debug('Unsupported compression encoding: %s', encoding)
            continue
        logger.debug('Compressing request with %s encoding.', encoding)
        request_dict['body'] = encoder(request_dict['body'])
        _set_compression_header(request_dict['headers'], encoding)
        return
+
+
def _should_compress_request(config, request_dict, operation_model):
    """Decide whether this request's body should be compressed.

    Note: ``_is_compressible_type`` may coerce a dict body in place, so
    it must run before any size inspection.
    """
    if config.disable_request_compression is True:
        return False
    if config.signature_version == 'v2':
        return False
    if operation_model.request_compression is None:
        return False

    if not _is_compressible_type(request_dict):
        logger.debug(
            'Body type %s does not support compression.',
            type(request_dict['body']),
        )
        return False

    if operation_model.has_streaming_input:
        # Streaming inputs are compressed unless a fixed length is required.
        metadata = operation_model.get_streaming_input().metadata
        return 'requiresLength' not in metadata

    return config.request_min_compression_size_bytes <= _get_body_size(
        request_dict['body']
    )
+
+
+def _is_compressible_type(request_dict):
+ body = request_dict['body']
+ # Coerce dict to a format compatible with compression.
+ if isinstance(body, dict):
+ body = urlencode(body, doseq=True, encoding='utf-8').encode('utf-8')
+ request_dict['body'] = body
+ is_supported_type = isinstance(body, (str, bytes, bytearray))
+ return is_supported_type or hasattr(body, 'read')
+
+
def _get_body_size(body):
    """Return the body's length in bytes, or 0 when it can't be determined."""
    size = determine_content_length(body)
    if size is not None:
        return size
    logger.debug(
        'Unable to get length of the request body: %s. '
        'Skipping compression.',
        body,
    )
    return 0
+
+
def _gzip_compress_body(body):
    """Gzip-compress *body*, which may be str, bytes-like, or file-like.

    File-like bodies that are seekable have their read position restored
    after compression.
    """
    register_feature_id('GZIP_REQUEST_COMPRESSION')
    if isinstance(body, str):
        return gzip_compress(body.encode('utf-8'))
    if isinstance(body, (bytes, bytearray)):
        return gzip_compress(body)
    if hasattr(body, 'read'):
        if not (hasattr(body, 'seek') and hasattr(body, 'tell')):
            return _gzip_compress_fileobj(body)
        position = body.tell()
        compressed = _gzip_compress_fileobj(body)
        # Leave the caller's stream where we found it.
        body.seek(position)
        return compressed
+
+
+def _gzip_compress_fileobj(body):
+ compressed_obj = io.BytesIO()
+ with GzipFile(fileobj=compressed_obj, mode='wb') as gz:
+ while True:
+ chunk = body.read(8192)
+ if not chunk:
+ break
+ if isinstance(chunk, str):
+ chunk = chunk.encode('utf-8')
+ gz.write(chunk)
+ compressed_obj.seek(0)
+ return compressed_obj
+
+
+def _set_compression_header(headers, encoding):
+ ce_header = headers.get('Content-Encoding')
+ if ce_header is None:
+ headers['Content-Encoding'] = encoding
+ else:
+ headers['Content-Encoding'] = f'{ce_header},{encoding}'
+
+
# Maps each supported Content-Encoding value to its compressor function.
COMPRESSION_MAPPING = {'gzip': _gzip_compress_body}
diff --git a/py311/lib/python3.11/site-packages/botocore/config.py b/py311/lib/python3.11/site-packages/botocore/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0ca5a71eb61dba9aacf71d856f4661d1575eaf2
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/config.py
@@ -0,0 +1,477 @@
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy
+
+from botocore.compat import OrderedDict
+from botocore.endpoint import DEFAULT_TIMEOUT, MAX_POOL_CONNECTIONS
+from botocore.exceptions import (
+ InvalidMaxRetryAttemptsError,
+ InvalidRetryConfigurationError,
+ InvalidRetryModeError,
+ InvalidS3AddressingStyleError,
+)
+
+
+class Config:
+ """Advanced configuration for Botocore clients.
+
+ :type region_name: str
+ :param region_name: The region to use in instantiating the client
+
+ :type signature_version: str
+ :param signature_version: The signature version when signing requests.
+
+ :type user_agent: str
+ :param user_agent: The value to use in the User-Agent header.
+
+ :type user_agent_extra: str
+ :param user_agent_extra: The value to append to the current User-Agent
+ header value.
+
+ :type user_agent_appid: str
+ :param user_agent_appid: A value that gets included in the User-Agent
        string in the format "app/<user_agent_appid>". Allowed characters are
+ ASCII alphanumerics and ``!#$%&'*+-.^_`|~``. All other characters will
+ be replaced by a ``-``.
+
+ :type connect_timeout: float or int
+ :param connect_timeout: The time in seconds till a timeout exception is
+ thrown when attempting to make a connection. The default is 60
+ seconds.
+
+ :type read_timeout: float or int
+ :param read_timeout: The time in seconds till a timeout exception is
+ thrown when attempting to read from a connection. The default is
+ 60 seconds.
+
+ :type parameter_validation: bool
+ :param parameter_validation: Whether parameter validation should occur
+ when serializing requests. The default is True. You can disable
+ parameter validation for performance reasons. Otherwise, it's
+ recommended to leave parameter validation enabled.
+
+ :type max_pool_connections: int
+ :param max_pool_connections: The maximum number of connections to
+ keep in a connection pool. If this value is not set, the default
+ value of 10 is used.
+
+ :type proxies: dict
+ :param proxies: A dictionary of proxy servers to use by protocol or
+ endpoint, e.g.:
+ ``{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}``.
+ The proxies are used on each request.
+
+ :type proxies_config: dict
+ :param proxies_config: A dictionary of additional proxy configurations.
+ Valid keys are:
+
+ * ``proxy_ca_bundle`` -- The path to a custom certificate bundle to use
+ when establishing SSL/TLS connections with proxy.
+
+ * ``proxy_client_cert`` -- The path to a certificate for proxy
+ TLS client authentication.
+
+ When a string is provided it is treated as a path to a proxy client
+ certificate. When a two element tuple is provided, it will be
+ interpreted as the path to the client certificate, and the path
+ to the certificate key.
+
+ * ``proxy_use_forwarding_for_https`` -- For HTTPS proxies,
+ forward your requests to HTTPS destinations with an absolute
+ URI. We strongly recommend you only use this option with
+ trusted or corporate proxies. Value must be boolean.
+
+ :type s3: dict
+ :param s3: A dictionary of S3 specific configurations.
+ Valid keys are:
+
+ * ``use_accelerate_endpoint`` -- Refers to whether to use the S3
+ Accelerate endpoint. The value must be a boolean. If True, the
+ client will use the S3 Accelerate endpoint. If the S3 Accelerate
+ endpoint is being used then the addressing style will always
+ be virtual.
+
+ * ``payload_signing_enabled`` -- Refers to whether or not to SHA256
+ sign SigV4 payloads. For operations that support request checksums,
+ this only applies when ``request_checksum_calculation`` is set to
+ ``when_required``. Otherwise, this is disabled for
+ streaming uploads (UploadPart and PutObject) by default.
+
+ * ``addressing_style`` -- Refers to the style in which to address
+ s3 endpoints. Values must be a string that equals one of:
+
+ * ``auto`` -- Addressing style is chosen for user. Depending
+ on the configuration of client, the endpoint may be addressed in
+ the virtual or the path style. Note that this is the default
+ behavior if no style is specified.
+
+ * ``virtual`` -- Addressing style is always virtual. The name of the
+ bucket must be DNS compatible or an exception will be thrown.
+ Endpoints will be addressed as such: ``amzn-s3-demo-bucket.s3.amazonaws.com``
+
+ * ``path`` -- Addressing style is always by path. Endpoints will be
+ addressed as such: ``s3.amazonaws.com/amzn-s3-demo-bucket``
+
+ * ``us_east_1_regional_endpoint`` -- Refers to what S3 endpoint to use
+ when the region is configured to be us-east-1. Values must be a
+ string that equals:
+
+ * ``regional`` -- Use the us-east-1.amazonaws.com endpoint if the
+ client is configured to use the us-east-1 region.
+
+ * ``legacy`` -- Use the s3.amazonaws.com endpoint if the client is
+ configured to use the us-east-1 region. This is the default if
+ the configuration option is not specified.
+
+
+ :type retries: dict
+ :param retries: A dictionary for configuration related to retry behavior.
+ Valid keys are:
+
+ * ``total_max_attempts`` -- An integer representing the maximum number of
+ total attempts that will be made on a single request. This includes
+ the initial request, so a value of 1 indicates that no requests
+ will be retried. If ``total_max_attempts`` and ``max_attempts``
+ are both provided, ``total_max_attempts`` takes precedence.
+ ``total_max_attempts`` is preferred over ``max_attempts`` because
+ it maps to the ``AWS_MAX_ATTEMPTS`` environment variable and
+ the ``max_attempts`` config file value.
+ * ``max_attempts`` -- An integer representing the maximum number of
+ retry attempts that will be made on a single request. For
+ example, setting this value to 2 will result in the request
+ being retried at most two times after the initial request. Setting
+ this value to 0 will result in no retries ever being attempted after
+ the initial request. If not provided, the number of retries will
+ default to the value specified in the service model, which is
+ typically four retries.
+ * ``mode`` -- A string representing the type of retry mode botocore
+ should use. Valid values are:
+
+ * ``legacy`` - The pre-existing retry behavior.
+
+ * ``standard`` - The standardized set of retry rules. This will also
+ default to 3 max attempts unless overridden.
+
+ * ``adaptive`` - Retries with additional client side throttling.
+
+ :type client_cert: str, (str, str)
+ :param client_cert: The path to a certificate for TLS client authentication.
+
+ When a string is provided it is treated as a path to a client
+ certificate to be used when creating a TLS connection.
+
+ If a client key is to be provided alongside the client certificate the
+ client_cert should be set to a tuple of length two where the first
+ element is the path to the client certificate and the second element is
+ the path to the certificate key.
+
+ :type inject_host_prefix: bool
+ :param inject_host_prefix: Whether host prefix injection should occur.
+
+ Defaults to None.
+
+ The default of None is equivalent to setting to True, which enables
+ the injection of operation parameters into the prefix of the hostname.
+ Setting this to False disables the injection of operation parameters
+ into the prefix of the hostname. Setting this to False is useful for
+ clients providing custom endpoints that should not have their host
+ prefix modified.
+
+ :type use_dualstack_endpoint: bool
+ :param use_dualstack_endpoint: Setting to True enables dualstack
+ endpoint resolution.
+
+ Defaults to None.
+
+ :type use_fips_endpoint: bool
+ :param use_fips_endpoint: Setting to True enables fips
+ endpoint resolution.
+
+ Defaults to None.
+
+ :type ignore_configured_endpoint_urls: bool
+ :param ignore_configured_endpoint_urls: Setting to True disables use
+ of endpoint URLs provided via environment variables and
+ the shared configuration file.
+
+ Defaults to None.
+
+ :type tcp_keepalive: bool
+ :param tcp_keepalive: Enables the TCP Keep-Alive socket option used when
+ creating new connections if set to True.
+
+ Defaults to False.
+
+ :type request_min_compression_size_bytes: int
+ :param request_min_compression_size_bytes: The minimum size in bytes that a
+ request body should be to trigger compression. All requests with
+ streaming input that don't contain the ``requiresLength`` trait will be
+ compressed regardless of this setting.
+
+ Defaults to None.
+
+ :type disable_request_compression: bool
+ :param disable_request_compression: Disables request body compression if
+ set to True.
+
+ Defaults to None.
+
+ :type sigv4a_signing_region_set: string
+ :param sigv4a_signing_region_set: A set of AWS regions to apply the signature for
+ when using SigV4a for signing. Set to ``*`` to represent all regions.
+
+ Defaults to None.
+
+ :type client_context_params: dict
+ :param client_context_params: A dictionary of parameters specific to
+ individual services. If available, valid parameters can be found in
+ the ``Client Context Parameters`` section of the service client's
+ documentation. Invalid parameters or ones that are not used by the
+ specified service will be ignored.
+
+ Defaults to None.
+
+ :type request_checksum_calculation: str
+ :param request_checksum_calculation: Determines when a checksum will be
+ calculated for request payloads. Valid values are:
+
+ * ``when_supported`` -- When set, a checksum will be calculated for
+ all request payloads of operations modeled with the ``httpChecksum``
+ trait where ``requestChecksumRequired`` is ``true`` or a
+ ``requestAlgorithmMember`` is modeled.
+
+ * ``when_required`` -- When set, a checksum will only be calculated
+ for request payloads of operations modeled with the ``httpChecksum``
+ trait where ``requestChecksumRequired`` is ``true`` or where a
+ ``requestAlgorithmMember`` is modeled and supplied.
+
+ Defaults to None.
+
+ :type response_checksum_validation: str
+ :param response_checksum_validation: Determines when checksum validation
+ will be performed on response payloads. Valid values are:
+
+ * ``when_supported`` -- When set, checksum validation is performed on
+ all response payloads of operations modeled with the ``httpChecksum``
+ trait where ``responseAlgorithms`` is modeled, except when no modeled
+ checksum algorithms are supported.
+
+ * ``when_required`` -- When set, checksum validation is not performed
+ on response payloads of operations unless the checksum algorithm is
+ supported and the ``requestValidationModeMember`` member is set to ``ENABLED``.
+
+ Defaults to None.
+
+ :type account_id_endpoint_mode: str
+ :param account_id_endpoint_mode: The value used to determine the client's
+ behavior for account ID based endpoint routing. Valid values are:
+
+ * ``preferred`` - The endpoint should include account ID if available.
+ * ``disabled`` - A resolved endpoint does not include account ID.
+ * ``required`` - The endpoint must include account ID. If the account ID
+ isn't available, an exception will be raised.
+
+ If a value is not provided, the client will default to ``preferred``.
+
+ Defaults to None.
+
+ :type auth_scheme_preference: str
+ :param auth_scheme_preference: A comma-delimited string of case-sensitive
+ auth scheme names used to determine the client's auth scheme preference.
+
+ Defaults to None.
+ """
+
+ OPTION_DEFAULTS = OrderedDict(
+ [
+ ('region_name', None),
+ ('signature_version', None),
+ ('user_agent', None),
+ ('user_agent_extra', None),
+ ('user_agent_appid', None),
+ ('connect_timeout', DEFAULT_TIMEOUT),
+ ('read_timeout', DEFAULT_TIMEOUT),
+ ('parameter_validation', True),
+ ('max_pool_connections', MAX_POOL_CONNECTIONS),
+ ('proxies', None),
+ ('proxies_config', None),
+ ('s3', None),
+ ('retries', None),
+ ('client_cert', None),
+ ('inject_host_prefix', None),
+ ('endpoint_discovery_enabled', None),
+ ('use_dualstack_endpoint', None),
+ ('use_fips_endpoint', None),
+ ('ignore_configured_endpoint_urls', None),
+ ('defaults_mode', None),
+ ('tcp_keepalive', None),
+ ('request_min_compression_size_bytes', None),
+ ('disable_request_compression', None),
+ ('client_context_params', None),
+ ('sigv4a_signing_region_set', None),
+ ('request_checksum_calculation', None),
+ ('response_checksum_validation', None),
+ ('account_id_endpoint_mode', None),
+ ('auth_scheme_preference', None),
+ ]
+ )
+
+ NON_LEGACY_OPTION_DEFAULTS = {
+ 'connect_timeout': None,
+ }
+
+ # The original default value of the inject_host_prefix parameter was True.
+ # This prevented the ability to override the value from other locations in
+ # the parameter provider chain, like env vars or the shared configuration
    # file. To accomplish this, we need to disambiguate when the value was set
+ # by the user or not. This overrides the parameter with a property so the
+ # default value of inject_host_prefix is still True if it is not set by the
+ # user.
    @property
    def inject_host_prefix(self):
        # "UNSET" is the sentinel assigned in __init__ when the user did
        # not supply a value; the historical default is True.
        if self._inject_host_prefix == "UNSET":
            return True

        return self._inject_host_prefix
+
+ # Override the setter for the case where the user does supply a value;
+ # _inject_host_prefix will no longer be "UNSET".
    @inject_host_prefix.setter
    def inject_host_prefix(self, value):
        # Any explicit assignment replaces the "UNSET" sentinel.
        self._inject_host_prefix = value
+
    def __init__(self, *args, **kwargs):
        """Accept the documented options positionally (in ``OPTION_DEFAULTS``
        order) or by keyword; unknown keywords raise ``TypeError``.
        """
        self._user_provided_options = self._record_user_provided_options(
            args, kwargs
        )

        # By default, we use a value that indicates the user did not
        # set it. This value MUST persist on the Config object to be used
        # elsewhere.
        self._inject_host_prefix = 'UNSET'

        # Merge the user_provided options onto the default options
        config_vars = copy.copy(self.OPTION_DEFAULTS)
        defaults_mode = self._user_provided_options.get(
            'defaults_mode', 'legacy'
        )
        if defaults_mode != 'legacy':
            config_vars.update(self.NON_LEGACY_OPTION_DEFAULTS)

        config_vars.update(self._user_provided_options)

        # Set the attributes based on the config_vars
        for key, value in config_vars.items():
            # Default values for the Config object are set here. We don't want
            # to use `setattr` in the case where the user already supplied a
            # value.
            if (
                key == 'inject_host_prefix'
                and 'inject_host_prefix'
                not in self._user_provided_options.keys()
            ):
                # Skip so the property's "UNSET" sentinel (and thus the
                # historical default of True) stays in effect.
                continue
            setattr(self, key, value)

        # Validate the s3 options
        self._validate_s3_configuration(self.s3)

        self._validate_retry_configuration(self.retries)
+
+ def _record_user_provided_options(self, args, kwargs):
+ option_order = list(self.OPTION_DEFAULTS)
+ user_provided_options = {}
+
+ # Iterate through the kwargs passed through to the constructor and
+ # map valid keys to the dictionary
+ for key, value in kwargs.items():
+ if key in self.OPTION_DEFAULTS:
+ user_provided_options[key] = value
+ # The key must exist in the available options
+ else:
+ raise TypeError(f"Got unexpected keyword argument '{key}'")
+
+ # The number of args should not be longer than the allowed
+ # options
+ if len(args) > len(option_order):
+ raise TypeError(
+ f"Takes at most {len(option_order)} arguments ({len(args)} given)"
+ )
+
+ # Iterate through the args passed through to the constructor and map
+ # them to appropriate keys.
+ for i, arg in enumerate(args):
+ # If a kwarg was specified for the arg, then error out
+ if option_order[i] in user_provided_options:
+ raise TypeError(
+ f"Got multiple values for keyword argument '{option_order[i]}'"
+ )
+ user_provided_options[option_order[i]] = arg
+
+ return user_provided_options
+
+ def _validate_s3_configuration(self, s3):
+ if s3 is not None:
+ addressing_style = s3.get('addressing_style')
+ if addressing_style not in ['virtual', 'auto', 'path', None]:
+ raise InvalidS3AddressingStyleError(
+ s3_addressing_style=addressing_style
+ )
+
+ def _validate_retry_configuration(self, retries):
+ valid_options = ('max_attempts', 'mode', 'total_max_attempts')
+ valid_modes = ('legacy', 'standard', 'adaptive')
+ if retries is not None:
+ for key, value in retries.items():
+ if key not in valid_options:
+ raise InvalidRetryConfigurationError(
+ retry_config_option=key,
+ valid_options=valid_options,
+ )
+ if key == 'max_attempts' and value < 0:
+ raise InvalidMaxRetryAttemptsError(
+ provided_max_attempts=value,
+ min_value=0,
+ )
+ if key == 'total_max_attempts' and value < 1:
+ raise InvalidMaxRetryAttemptsError(
+ provided_max_attempts=value,
+ min_value=1,
+ )
+ if key == 'mode' and value not in valid_modes:
+ raise InvalidRetryModeError(
+ provided_retry_mode=value,
+ valid_modes=valid_modes,
+ )
+
+ def merge(self, other_config):
+ """Merges the config object with another config object
+
+ This will merge in all non-default values from the provided config
+ and return a new config object
+
+ :type other_config: botocore.config.Config
+ :param other config: Another config object to merge with. The values
+ in the provided config object will take precedence in the merging
+
+ :returns: A config object built from the merged values of both
+ config objects.
+ """
+ # Make a copy of the current attributes in the config object.
+ config_options = copy.copy(self._user_provided_options)
+
+ # Merge in the user provided options from the other config
+ config_options.update(other_config._user_provided_options)
+
+ # Return a new config object with the merged properties.
+ return Config(**config_options)
diff --git a/py311/lib/python3.11/site-packages/botocore/configloader.py b/py311/lib/python3.11/site-packages/botocore/configloader.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b6c82bcad6061a82cb39926141734a03dc3bd8c
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/configloader.py
@@ -0,0 +1,287 @@
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import configparser
+import copy
+import os
+import shlex
+import sys
+
+import botocore.exceptions
+
+
def multi_file_load_config(*filenames):
    """Load and combine multiple INI configs with profiles.

    This function will take a list of filenames and return
    a single dictionary that represents the merging of the loaded
    config files.

    If any of the provided filenames does not exist, then that file
    is ignored.  It is therefore ok to provide a list of filenames,
    some of which may not exist.

    Configuration files are **not** deep merged, only the top level
    keys are merged.  The filenames should be passed in order of
    precedence.  The first config file has precedence over the
    second config file, which has precedence over the third config file,
    etc.  The only exception to this is that the "profiles" key is
    merged to combine profiles from multiple config files into a
    single profiles mapping.  However, if a profile is defined in
    multiple config files, then the config file with the highest
    precedence is used.  Profile values themselves are not merged.
    For example::

        FileA              FileB              FileC
        [foo]             [foo]              [bar]
        a=1               a=2                a=3
                          b=2

        [bar]             [baz]              [profile a]
        a=2               a=3                region=e

        [profile a]       [profile b]        [profile c]
        region=c          region=d           region=f

    The final result of ``multi_file_load_config(FileA, FileB, FileC)``
    would be::

        {"foo": {"a": 1}, "bar": {"a": 2}, "baz": {"a": 3},
         "profiles": {"a": {"region": "c"}, "b": {"region": "d"},
                      "c": {"region": "f"}}}

    Note that the "foo" key comes from A, even though it's defined in both
    FileA and FileB.  Because "foo" was defined in FileA first, then the
    values for "foo" from FileA are used and the values for "foo" from
    FileB are ignored.  Also note where the profiles originate from.
    Profile "a" comes from FileA, profile "b" comes from FileB, and
    profile "c" comes from FileC.

    """
    configs = []
    profiles = []
    for filename in filenames:
        try:
            loaded = load_config(filename)
        except botocore.exceptions.ConfigNotFound:
            # Missing files are allowed; silently skip them.
            continue
        # Split each parsed file into its profiles mapping and its
        # remaining top level keys so the two can be merged separately.
        profiles.append(loaded.pop('profiles'))
        configs.append(loaded)
    # First occurrence wins in both merges, which is what gives earlier
    # filenames precedence over later ones.
    merged_config = _merge_list_of_dicts(configs)
    merged_profiles = _merge_list_of_dicts(profiles)
    merged_config['profiles'] = merged_profiles
    return merged_config
+
+
+def _merge_list_of_dicts(list_of_dicts):
+ merged_dicts = {}
+ for single_dict in list_of_dicts:
+ for key, value in single_dict.items():
+ if key not in merged_dicts:
+ merged_dicts[key] = value
+ return merged_dicts
+
+
def load_config(config_filename):
    """Parse an INI config file and group profile sections.

    Top level ``[profile foo]`` sections are collected under a single
    "profiles" key.  Use ``raw_config_parse`` instead if you want every
    section name mapped directly to a top level key.
    """
    return build_profile_map(raw_config_parse(config_filename))
+
+
def raw_config_parse(config_filename, parse_subsections=True):
    """Returns the parsed INI config contents.

    Each section name is a top level key.

    :param config_filename: The name of the INI file to parse

    :param parse_subsections: If True, parse indented blocks as
       subsections that represent their own configuration dictionary.
       For example, if the config file had the contents::

           s3 =
              signature_version = s3v4
              addressing_style = path

        The resulting ``raw_config_parse`` would be::

            {'s3': {'signature_version': 's3v4', 'addressing_style': 'path'}}

       If False, do not try to parse subsections and return the indented
       block as its literal value::

            {'s3': '\nsignature_version = s3v4\naddressing_style = path'}

    :returns: A dict with keys for each profile found in the config
        file and the value of each key being a dict containing name
        value pairs found in that profile.

    :raises: ConfigNotFound, ConfigParseError
    """
    config = {}
    path = config_filename
    # A None path yields an empty config rather than an error.
    if path is not None:
        # Expand env vars and ~ before testing for existence.
        path = os.path.expandvars(path)
        path = os.path.expanduser(path)
        if not os.path.isfile(path):
            raise botocore.exceptions.ConfigNotFound(path=_unicode_path(path))
        cp = configparser.RawConfigParser()
        try:
            cp.read([path])
        except (configparser.Error, UnicodeDecodeError) as e:
            # Re-raise with the original parser error attached; `from
            # None` suppresses the noisy chained traceback.
            raise botocore.exceptions.ConfigParseError(
                path=_unicode_path(path), error=e
            ) from None
        else:
            for section in cp.sections():
                config[section] = {}
                for option in cp.options(section):
                    config_value = cp.get(section, option)
                    if parse_subsections and config_value.startswith('\n'):
                        # Then we need to parse the inner contents as
                        # hierarchical. We support a single level
                        # of nesting for now.
                        try:
                            config_value = _parse_nested(config_value)
                        except ValueError as e:
                            raise botocore.exceptions.ConfigParseError(
                                path=_unicode_path(path), error=e
                            ) from None
                    config[section][option] = config_value
    return config
+
+
+def _unicode_path(path):
+ if isinstance(path, str):
+ return path
+ # According to the documentation getfilesystemencoding can return None
+ # on unix in which case the default encoding is used instead.
+ filesystem_encoding = sys.getfilesystemencoding()
+ if filesystem_encoding is None:
+ filesystem_encoding = sys.getdefaultencoding()
+ return path.decode(filesystem_encoding, 'replace')
+
+
+def _parse_nested(config_value):
+ # Given a value like this:
+ # \n
+ # foo = bar
+ # bar = baz
+ # We need to parse this into
+ # {'foo': 'bar', 'bar': 'baz}
+ parsed = {}
+ for line in config_value.splitlines():
+ line = line.strip()
+ if not line:
+ continue
+ # The caller will catch ValueError
+ # and raise an appropriate error
+ # if this fails.
+ key, value = line.split('=', 1)
+ parsed[key.strip()] = value.strip()
+ return parsed
+
+
+def _parse_section(key, values):
+ result = {}
+ try:
+ parts = shlex.split(key)
+ except ValueError:
+ return result
+ if len(parts) == 2:
+ result[parts[1]] = values
+ return result
+
+
def build_profile_map(parsed_ini_config):
    """Convert the parsed INI config into a profile map.

    The config file format requires that every profile except the
    default to be prepended with "profile", e.g.::

        [profile test]
        aws_... = foo
        aws_... = bar

        [profile bar]
        aws_... = foo
        aws_... = bar

        # This is *not* a profile
        [preview]
        otherstuff = 1

        # Neither is this
        [foobar]
        morestuff = 2

    The build_profile_map will take a parsed INI config file where each top
    level key represents a section name, and convert into a format where all
    the profiles are under a single top level "profiles" key, and each key in
    the sub dictionary is a profile name.  For example, the above config file
    would be converted from::

        {"profile test": {"aws_...": "foo", "aws...": "bar"},
         "profile bar": {"aws...": "foo", "aws...": "bar"},
         "preview": {"otherstuff": ...},
         "foobar": {"morestuff": ...},
        }

    into::

        {"profiles": {"test": {"aws_...": "foo", "aws...": "bar"},
                      "bar": {"aws...": "foo", "aws...": "bar"},
         "preview": {"otherstuff": ...},
         "foobar": {"morestuff": ...},
        }

    If there are no profiles in the provided parsed INI contents, then
    an empty dict will be the value associated with the ``profiles`` key.

    .. note::

        This will not mutate the passed in parsed_ini_config.  Instead it
        will make a deepcopy and return that value.

    """
    parsed_config = copy.deepcopy(parsed_ini_config)
    profiles = {}
    sso_sessions = {}
    services = {}
    final_config = {}
    for key, values in parsed_config.items():
        # "[profile foo]", "[sso-session foo]" and "[services foo]"
        # headers are turned into {name: values} by _parse_section;
        # malformed headers are silently dropped (it returns {}).
        if key.startswith("profile"):
            profiles.update(_parse_section(key, values))
        elif key.startswith("sso-session"):
            sso_sessions.update(_parse_section(key, values))
        elif key.startswith("services"):
            services.update(_parse_section(key, values))
        elif key == 'default':
            # default section is special and is considered a profile
            # name but we don't require you use 'profile "default"'
            # as a section.
            profiles[key] = values
        else:
            # Any other section name is kept as a plain top level key.
            final_config[key] = values
    final_config['profiles'] = profiles
    final_config['sso_sessions'] = sso_sessions
    final_config['services'] = services
    return final_config
diff --git a/py311/lib/python3.11/site-packages/botocore/configprovider.py b/py311/lib/python3.11/site-packages/botocore/configprovider.py
new file mode 100644
index 0000000000000000000000000000000000000000..b4a9c5b757e475d71a27466cc3a1ed895709ab09
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/configprovider.py
@@ -0,0 +1,1051 @@
+# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""This module contains the interface for controlling how configuration
+is loaded.
+"""
+
+import copy
+import logging
+import os
+
+from botocore import utils
+from botocore.exceptions import InvalidConfigError
+
+logger = logging.getLogger(__name__)
+
+
+#: A default dictionary that maps the logical names for session variables
+#: to the specific environment variables and configuration file names
+#: that contain the values for these variables.
+#: When creating a new Session object, you can pass in your own dictionary
+#: to remap the logical names or to add new logical names. You can then
+#: get the current value for these variables by using the
+#: ``get_config_variable`` method of the :class:`botocore.session.Session`
+#: class.
+#: These form the keys of the dictionary. The values in the dictionary
+#: are tuples of (<config_name>, <env var>, <default_value>,
+#: <conversion func>).
+#: The conversion func is a function that takes the configuration value
+#: as an argument and returns the converted value. If this value is
+#: None, then the configuration value is returned unmodified. This
+#: conversion function can be used to type convert config values to
+#: values other than the default values of strings.
+#: The ``profile`` and ``config_file`` variables should always have a
+#: None value for the first entry in the tuple because it doesn't make
+#: sense to look inside the config file for the location of the config
+#: file or for the default profile to use.
+#: The ``config_name`` is the name to look for in the configuration file,
+#: the ``env var`` is the OS environment variable (``os.environ``) to
+#: use, and ``default_value`` is the value to use if no value is otherwise
+#: found.
+#: NOTE: Fixing the spelling of this variable would be a breaking change.
+#: Please leave as is.
BOTOCORE_DEFAUT_SESSION_VARIABLES = {
    # logical: config_file, env_var, default_value, conversion_func
    'profile': (None, ['AWS_DEFAULT_PROFILE', 'AWS_PROFILE'], None, None),
    'region': ('region', 'AWS_DEFAULT_REGION', None, None),
    'data_path': ('data_path', 'AWS_DATA_PATH', None, None),
    'config_file': (None, 'AWS_CONFIG_FILE', '~/.aws/config', None),
    'ca_bundle': ('ca_bundle', 'AWS_CA_BUNDLE', None, None),
    'api_versions': ('api_versions', None, {}, None),
    # This is the shared credentials file amongst sdks.
    'credentials_file': (
        None,
        'AWS_SHARED_CREDENTIALS_FILE',
        '~/.aws/credentials',
        None,
    ),
    # These variables only exist in the config file.
    # This is the number of seconds until we time out a request to
    # the instance metadata service.
    'metadata_service_timeout': (
        'metadata_service_timeout',
        'AWS_METADATA_SERVICE_TIMEOUT',
        1,
        int,
    ),
    # This is the number of request attempts we make until we give
    # up trying to retrieve data from the instance metadata service.
    'metadata_service_num_attempts': (
        'metadata_service_num_attempts',
        'AWS_METADATA_SERVICE_NUM_ATTEMPTS',
        1,
        int,
    ),
    # EC2 instance metadata service (IMDS) endpoint configuration.
    'ec2_metadata_service_endpoint': (
        'ec2_metadata_service_endpoint',
        'AWS_EC2_METADATA_SERVICE_ENDPOINT',
        None,
        None,
    ),
    'ec2_metadata_service_endpoint_mode': (
        'ec2_metadata_service_endpoint_mode',
        'AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE',
        None,
        None,
    ),
    'ec2_metadata_v1_disabled': (
        'ec2_metadata_v1_disabled',
        'AWS_EC2_METADATA_V1_DISABLED',
        False,
        utils.ensure_boolean,
    ),
    'imds_use_ipv6': (
        'imds_use_ipv6',
        'AWS_IMDS_USE_IPV6',
        False,
        utils.ensure_boolean,
    ),
    # Endpoint variant (dualstack / FIPS) selection.
    'use_dualstack_endpoint': (
        'use_dualstack_endpoint',
        'AWS_USE_DUALSTACK_ENDPOINT',
        None,
        utils.ensure_boolean,
    ),
    'use_fips_endpoint': (
        'use_fips_endpoint',
        'AWS_USE_FIPS_ENDPOINT',
        None,
        utils.ensure_boolean,
    ),
    'ignore_configured_endpoint_urls': (
        'ignore_configured_endpoint_urls',
        'AWS_IGNORE_CONFIGURED_ENDPOINT_URLS',
        None,
        utils.ensure_boolean,
    ),
    'parameter_validation': ('parameter_validation', None, True, None),
    # Client side monitoring configurations.
    # Note: These configurations are considered internal to botocore.
    # Do not use them until publicly documented.
    'csm_enabled': (
        'csm_enabled',
        'AWS_CSM_ENABLED',
        False,
        utils.ensure_boolean,
    ),
    'csm_host': ('csm_host', 'AWS_CSM_HOST', '127.0.0.1', None),
    'csm_port': ('csm_port', 'AWS_CSM_PORT', 31000, int),
    'csm_client_id': ('csm_client_id', 'AWS_CSM_CLIENT_ID', '', None),
    # Endpoint discovery configuration
    'endpoint_discovery_enabled': (
        'endpoint_discovery_enabled',
        'AWS_ENDPOINT_DISCOVERY_ENABLED',
        'auto',
        None,
    ),
    'sts_regional_endpoints': (
        'sts_regional_endpoints',
        'AWS_STS_REGIONAL_ENDPOINTS',
        'regional',
        None,
    ),
    'retry_mode': ('retry_mode', 'AWS_RETRY_MODE', 'legacy', None),
    'defaults_mode': ('defaults_mode', 'AWS_DEFAULTS_MODE', 'legacy', None),
    # We can't have a default here for v1 because we need to defer to
    # whatever the defaults are in _retry.json.
    'max_attempts': ('max_attempts', 'AWS_MAX_ATTEMPTS', None, int),
    'user_agent_appid': ('sdk_ua_app_id', 'AWS_SDK_UA_APP_ID', None, None),
    # Request compression settings.
    'request_min_compression_size_bytes': (
        'request_min_compression_size_bytes',
        'AWS_REQUEST_MIN_COMPRESSION_SIZE_BYTES',
        10240,
        None,
    ),
    'disable_request_compression': (
        'disable_request_compression',
        'AWS_DISABLE_REQUEST_COMPRESSION',
        False,
        utils.ensure_boolean,
    ),
    'sigv4a_signing_region_set': (
        'sigv4a_signing_region_set',
        'AWS_SIGV4A_SIGNING_REGION_SET',
        None,
        None,
    ),
    # Flexible checksum calculation/validation settings.
    'request_checksum_calculation': (
        'request_checksum_calculation',
        'AWS_REQUEST_CHECKSUM_CALCULATION',
        "when_supported",
        None,
    ),
    'response_checksum_validation': (
        'response_checksum_validation',
        'AWS_RESPONSE_CHECKSUM_VALIDATION',
        "when_supported",
        None,
    ),
    'account_id_endpoint_mode': (
        'account_id_endpoint_mode',
        'AWS_ACCOUNT_ID_ENDPOINT_MODE',
        'preferred',
        None,
    ),
    'disable_host_prefix_injection': (
        'disable_host_prefix_injection',
        'AWS_DISABLE_HOST_PREFIX_INJECTION',
        None,
        utils.ensure_boolean,
    ),
    'auth_scheme_preference': (
        'auth_scheme_preference',
        'AWS_AUTH_SCHEME_PREFERENCE',
        None,
        None,
    ),
}
+# A mapping for the s3 specific configuration vars. These are the configuration
+# vars that typically go in the s3 section of the config file. This mapping
+# follows the same schema as the previous session variable mapping.
DEFAULT_S3_CONFIG_VARS = {
    # A tuple in the first position means (section, key) — the value is
    # looked up inside the config file's nested ``s3`` block.  A list
    # means multiple locations are searched in order.
    'addressing_style': (('s3', 'addressing_style'), None, None, None),
    'use_accelerate_endpoint': (
        ('s3', 'use_accelerate_endpoint'),
        None,
        None,
        utils.ensure_boolean,
    ),
    'use_dualstack_endpoint': (
        ('s3', 'use_dualstack_endpoint'),
        None,
        None,
        utils.ensure_boolean,
    ),
    'payload_signing_enabled': (
        ('s3', 'payload_signing_enabled'),
        None,
        None,
        utils.ensure_boolean,
    ),
    'use_arn_region': (
        ['s3_use_arn_region', ('s3', 'use_arn_region')],
        'AWS_S3_USE_ARN_REGION',
        None,
        utils.ensure_boolean,
    ),
    'us_east_1_regional_endpoint': (
        [
            's3_us_east_1_regional_endpoint',
            ('s3', 'us_east_1_regional_endpoint'),
        ],
        'AWS_S3_US_EAST_1_REGIONAL_ENDPOINT',
        None,
        None,
    ),
    's3_disable_multiregion_access_points': (
        ('s3', 's3_disable_multiregion_access_points'),
        'AWS_S3_DISABLE_MULTIREGION_ACCESS_POINTS',
        None,
        utils.ensure_boolean,
    ),
}
+# A mapping for the proxy specific configuration vars. These are
+# used to configure how botocore interacts with proxy setups while
+# sending requests.
DEFAULT_PROXIES_CONFIG_VARS = {
    # These only exist in the config file (no env var, no default).
    'proxy_ca_bundle': ('proxy_ca_bundle', None, None, None),
    'proxy_client_cert': ('proxy_client_cert', None, None, None),
    'proxy_use_forwarding_for_https': (
        'proxy_use_forwarding_for_https',
        None,
        None,
        utils.normalize_boolean,
    ),
}
+
+
def create_botocore_default_config_mapping(session):
    """Build the default logical-name -> provider mapping for a session.

    Top level session variables get ordinary config chains; the nested
    ``s3`` and ``proxies_config`` sections get section-aware providers
    wrapping their own chains.
    """
    factory = ConfigChainFactory(session=session)
    mapping = _create_config_chain_mapping(
        factory, BOTOCORE_DEFAUT_SESSION_VARIABLES
    )
    for section, section_vars in (
        ('s3', DEFAULT_S3_CONFIG_VARS),
        ('proxies_config', DEFAULT_PROXIES_CONFIG_VARS),
    ):
        mapping[section] = SectionConfigProvider(
            section,
            session,
            _create_config_chain_mapping(factory, section_vars),
        )
    return mapping
+
+
+def _create_config_chain_mapping(chain_builder, config_variables):
+ mapping = {}
+ for logical_name, config in config_variables.items():
+ mapping[logical_name] = chain_builder.create_config_chain(
+ instance_name=logical_name,
+ env_var_names=config[1],
+ config_property_names=config[0],
+ default=config[2],
+ conversion_func=config[3],
+ )
+ return mapping
+
+
class DefaultConfigResolver:
    """Resolve per-mode default configuration values.

    ``default_config_data`` supplies a ``base`` mapping of defaults and
    a ``modes`` mapping describing how each mode modifies those base
    values (multiply / add / override).  Resolved results are cached
    per mode.
    """

    def __init__(self, default_config_data):
        self._base_default_config = default_config_data['base']
        self._modes = default_config_data['modes']
        self._resolved_default_configurations = {}

    def _resolve_default_values_by_mode(self, mode):
        # Start from a copy of the base defaults and apply this mode's
        # modifications one config variable at a time.
        resolved = self._base_default_config.copy()
        for config_var, modification_dict in self._modes.get(mode).items():
            operation = next(iter(modification_dict))
            operand = modification_dict[operation]
            value = resolved[config_var]
            if operation == 'multiply':
                value *= operand
            elif operation == 'add':
                value += operand
            elif operation == 'override':
                value = operand
            resolved[config_var] = value
        return resolved

    def get_default_modes(self):
        # 'legacy' and 'auto' are always valid, plus every mode present
        # in the loaded default config data.
        return ['legacy', 'auto', *self._modes]

    def get_default_config_values(self, mode):
        cache = self._resolved_default_configurations
        if mode not in cache:
            cache[mode] = self._resolve_default_values_by_mode(mode)
        return cache[mode]
+
+
class ConfigChainFactory:
    """Factory class to create our most common configuration chain case.

    This is a convenience class to construct configuration chains that follow
    our most common pattern. This is to prevent ordering them incorrectly,
    and to make the config chain construction more readable.
    """

    def __init__(self, session, environ=None):
        """Initialize a ConfigChainFactory.

        :type session: :class:`botocore.session.Session`
        :param session: This is the session that should be used to look up
            values from the config file.

        :type environ: dict
        :param environ: A mapping to use for environment variables. If this
            is not provided it will default to use os.environ.
        """
        self._session = session
        if environ is None:
            environ = os.environ
        self._environ = environ

    def create_config_chain(
        self,
        instance_name=None,
        env_var_names=None,
        config_property_names=None,
        default=None,
        conversion_func=None,
    ):
        """Build a config chain following the standard botocore pattern.

        In botocore most of our config chains follow the precedence:
        session_instance_variables, environment, config_file, default_value.

        This is a convenience function for creating a chain that follows
        that precedence.

        :type instance_name: str
        :param instance_name: This indicates what session instance variable
            corresponds to this config value. If it is None it will not be
            added to the chain.

        :type env_var_names: str or list of str or None
        :param env_var_names: One or more environment variable names to
            search for this value. They are searched in order. If it is None
            it will not be added to the chain.

        :type config_property_names: str/tuple or list of str/tuple or None
        :param config_property_names: One or more strings or tuples
            representing the name of the key in the config file for this
            config option. They are searched in order. If it is None it will
            not be added to the chain.

        :type default: Any
        :param default: Any constant value to be returned.

        :type conversion_func: None or callable
        :param conversion_func: If this value is None then it has no effect on
            the return type. Otherwise, it is treated as a function that will
            conversion_func our provided type.

        :rtype: ChainProvider
        :returns: A ChainProvider that resolves in the order env_var_names ->
            config_property_name -> default. Any values that were none are
            omitted from the chain.
        """
        providers = []
        # Order matters here: earlier providers win, giving the
        # documented instance -> env -> config-file -> default precedence.
        if instance_name is not None:
            providers.append(
                InstanceVarProvider(
                    instance_var=instance_name, session=self._session
                )
            )
        if env_var_names is not None:
            providers.extend(self._get_env_providers(env_var_names))
        if config_property_names is not None:
            providers.extend(
                self._get_scoped_config_providers(config_property_names)
            )
        if default is not None:
            providers.append(ConstantProvider(value=default))

        return ChainProvider(
            providers=providers,
            conversion_func=conversion_func,
        )

    def _get_env_providers(self, env_var_names):
        # Accept either a single name or a list of names.
        env_var_providers = []
        if not isinstance(env_var_names, list):
            env_var_names = [env_var_names]
        for env_var_name in env_var_names:
            env_var_providers.append(
                EnvironmentProvider(name=env_var_name, env=self._environ)
            )
        return env_var_providers

    def _get_scoped_config_providers(self, config_property_names):
        # Accept either a single name/tuple or a list of them.
        scoped_config_providers = []
        if not isinstance(config_property_names, list):
            config_property_names = [config_property_names]
        for config_property_name in config_property_names:
            scoped_config_providers.append(
                ScopedConfigProvider(
                    config_var_name=config_property_name,
                    session=self._session,
                )
            )
        return scoped_config_providers
+
+
class ConfigValueStore:
    """Store for configuration values.

    Values are resolved lazily through per-name providers; explicit
    overrides set via ``set_config_variable`` always take precedence
    over provider lookup.
    """

    def __init__(self, mapping=None):
        """Initialize a ConfigValueStore.

        :type mapping: dict
        :param mapping: Map of logical name to a BaseProvider subclass.
            When a config variable is requested via get_config_variable,
            the corresponding provider is invoked to load the value.
        """
        self._overrides = {}
        self._mapping = {}
        for logical_name, provider in (mapping or {}).items():
            self.set_config_provider(logical_name, provider)

    def __deepcopy__(self, memo):
        # Providers are deep-copied; override values are carried over
        # by reference, mirroring the copy semantics of the overrides.
        clone = ConfigValueStore(copy.deepcopy(self._mapping, memo))
        for logical_name, override_value in self._overrides.items():
            clone.set_config_variable(logical_name, override_value)
        return clone

    def __copy__(self):
        clone = ConfigValueStore(copy.copy(self._mapping))
        for logical_name, override_value in self._overrides.items():
            clone.set_config_variable(logical_name, override_value)
        return clone

    def get_config_variable(self, logical_name):
        """Return the value for ``logical_name``, or None if undefined.

        An explicit override wins; otherwise the registered provider
        (if any) is asked to provide the value.

        :type logical_name: str
        :param logical_name: The logical name of the session variable to
            retrieve.
        """
        if logical_name in self._overrides:
            return self._overrides[logical_name]
        try:
            provider = self._mapping[logical_name]
        except KeyError:
            return None
        return provider.provide()

    def get_config_provider(self, logical_name):
        """Return the provider for ``logical_name``, or None.

        Names with an explicit override report no provider, since the
        override short-circuits provider lookup entirely.

        :type logical_name: str
        :param logical_name: The logical name whose provider to look up.
        """
        if logical_name in self._overrides:
            return None
        return self._mapping.get(logical_name)

    def set_config_variable(self, logical_name, value):
        """Override a configuration variable with a specific value.

        Subsequent ``get_config_variable`` calls for ``logical_name``
        return ``value`` instead of consulting the provider chain,
        giving per-session specific configuration values.

        :type logical_name: str
        :param logical_name: The logical name of the session variable to
            set (a key of ``SESSION_VARIABLES``).

        :param value: The value to associate with the config variable.
        """
        self._overrides[logical_name] = value

    def clear_config_variable(self, logical_name):
        """Remove an override so provider lookup applies again.

        :type logical_name: str
        :param logical_name: The name of the parameter whose override
            value should be cleared.  A no-op if no override exists.
        """
        self._overrides.pop(logical_name, None)

    def set_config_provider(self, logical_name, provider):
        """Replace the provider responsible for ``logical_name``.

        :type logical_name: str
        :param logical_name: The name of the config value whose provider
            is being replaced.

        :type provider: :class:`botocore.configprovider.BaseProvider`
        :param provider: The new provider for ``logical_name``.
        """
        self._mapping[logical_name] = provider
+
+
class SmartDefaultsConfigStoreFactory:
    """Merge "smart defaults" values into an existing config store.

    Which defaults apply depends on the resolved defaults mode; 'auto'
    is resolved to a concrete mode by comparing the client region with
    the region the code appears to be running in.
    """

    def __init__(self, default_config_resolver, imds_region_provider):
        self._default_config_resolver = default_config_resolver
        self._imds_region_provider = imds_region_provider
        # Initializing _instance_metadata_region as None so we
        # can fetch region in a lazy fashion only when needed.
        self._instance_metadata_region = None

    def merge_smart_defaults(self, config_store, mode, region_name):
        # Resolve 'auto' into a concrete mode first, then apply each
        # default through its dedicated ``_set_<configVar>`` handler.
        # Config vars without a handler are intentionally skipped.
        if mode == 'auto':
            mode = self.resolve_auto_mode(region_name)
        default_configs = (
            self._default_config_resolver.get_default_config_values(mode)
        )
        for config_var in default_configs:
            config_value = default_configs[config_var]
            method = getattr(self, f'_set_{config_var}', None)
            if method:
                method(config_store, config_value)

    def resolve_auto_mode(self, region_name):
        """Pick 'in-region', 'cross-region' or 'standard' for 'auto'.

        The current region is taken from the environment when running
        inside an AWS execution environment, otherwise from the
        instance metadata service (cached after the first lookup).
        """
        current_region = None
        if os.environ.get('AWS_EXECUTION_ENV'):
            default_region = os.environ.get('AWS_DEFAULT_REGION')
            current_region = os.environ.get('AWS_REGION', default_region)
        if not current_region:
            if self._instance_metadata_region:
                current_region = self._instance_metadata_region
            else:
                try:
                    current_region = self._imds_region_provider.provide()
                    self._instance_metadata_region = current_region
                except Exception:
                    # Best effort: IMDS may be unreachable; fall back
                    # to 'standard' below.
                    pass

        if current_region:
            if region_name == current_region:
                return 'in-region'
            else:
                return 'cross-region'
        return 'standard'

    def _update_provider(self, config_store, variable, value):
        # Install ``value`` as the *default* (lowest precedence) for
        # ``variable`` without disturbing any higher-priority providers.
        original_provider = config_store.get_config_provider(variable)
        default_provider = ConstantProvider(value)
        if isinstance(original_provider, ChainProvider):
            chain_provider_copy = copy.deepcopy(original_provider)
            chain_provider_copy.set_default_provider(default_provider)
            default_provider = chain_provider_copy
        elif isinstance(original_provider, BaseProvider):
            default_provider = ChainProvider(
                providers=[original_provider, default_provider]
            )
        config_store.set_config_provider(variable, default_provider)

    def _update_section_provider(
        self, config_store, section_name, variable, value
    ):
        # Same as _update_provider, but for a variable nested inside a
        # SectionConfigProvider (e.g. the "s3" section).
        section_provider_copy = copy.deepcopy(
            config_store.get_config_provider(section_name)
        )
        section_provider_copy.set_default_provider(
            variable, ConstantProvider(value)
        )
        config_store.set_config_provider(section_name, section_provider_copy)

    # The camelCase method suffixes below match the config variable
    # names used in the shared defaults data resolved by
    # DefaultConfigResolver (see merge_smart_defaults dispatch).
    def _set_retryMode(self, config_store, value):
        self._update_provider(config_store, 'retry_mode', value)

    def _set_stsRegionalEndpoints(self, config_store, value):
        self._update_provider(config_store, 'sts_regional_endpoints', value)

    def _set_s3UsEast1RegionalEndpoints(self, config_store, value):
        self._update_section_provider(
            config_store, 's3', 'us_east_1_regional_endpoint', value
        )

    def _set_connectTimeoutInMillis(self, config_store, value):
        # Defaults data is in milliseconds; botocore timeouts are seconds.
        self._update_provider(config_store, 'connect_timeout', value / 1000)
+
+
class BaseProvider:
    """Base class for configuration value providers.

    A configuration provider has some method of providing a configuration
    value.
    """

    def provide(self):
        """Provide a config value.

        Subclasses must override this method; the base implementation
        always raises NotImplementedError.
        """
        raise NotImplementedError('provide')
+
+
class ChainProvider(BaseProvider):
    """Try a sequence of providers, returning the first non-None value."""

    def __init__(self, providers=None, conversion_func=None):
        """Initialize a ChainProvider.

        :type providers: list
        :param providers: Providers consulted, in order, when a value
            is requested.

        :type conversion_func: None or callable
        :param conversion_func: Optional callable applied to the found
            value before it is returned; None leaves values untouched.
        """
        self._providers = [] if providers is None else providers
        self._conversion_func = conversion_func

    def __deepcopy__(self, memo):
        return ChainProvider(
            copy.deepcopy(self._providers, memo), self._conversion_func
        )

    def provide(self):
        """Return the first non-None value from the chain, or None.

        Each provider's provide method is called in order; the first
        non-None result (after optional conversion) wins.
        """
        for candidate in self._providers:
            found = candidate.provide()
            if found is not None:
                return self._convert_type(found)
        return None

    def set_default_provider(self, default_provider):
        # Replace a trailing ConstantProvider (the previous default)
        # if one exists; otherwise append the new default to the chain.
        if self._providers and isinstance(
            self._providers[-1], ConstantProvider
        ):
            self._providers[-1] = default_provider
        else:
            self._providers.append(default_provider)

        constant_count = sum(
            1
            for provider in self._providers
            if isinstance(provider, ConstantProvider)
        )
        if constant_count > 1:
            logger.info(
                'ChainProvider object contains multiple '
                'instances of ConstantProvider objects'
            )

    def _convert_type(self, value):
        if self._conversion_func is None:
            return value
        return self._conversion_func(value)

    def __repr__(self):
        return '[{}]'.format(', '.join(str(p) for p in self._providers))
+
+
class InstanceVarProvider(BaseProvider):
    """Looks a config value up among a session's instance variables."""

    def __init__(self, instance_var, session):
        """Initialize InstanceVarProvider.

        :type instance_var: str
        :param instance_var: Name of the session instance variable to read.

        :type session: :class:`botocore.session.Session`
        :param session: Session whose instance variables are consulted.
        """
        self._instance_var = instance_var
        self._session = session

    def __deepcopy__(self, memo):
        # The session is intentionally shared rather than copied.
        return InstanceVarProvider(
            copy.deepcopy(self._instance_var, memo), self._session
        )

    def provide(self):
        """Return the instance variable's value, or None when unset."""
        return self._session.instance_variables().get(self._instance_var)

    def __repr__(self):
        return f'InstanceVarProvider(instance_var={self._instance_var}, session={self._session})'
+
+
class ScopedConfigProvider(BaseProvider):
    def __init__(self, config_var_name, session):
        """Initialize ScopedConfigProvider.

        :type config_var_name: str or tuple
        :param config_var_name: Name of the config-file variable to read.
            A two-item tuple means (section name, variable within section).

        :type session: :class:`botocore.session.Session`
        :param session: Session supplying the loaded config-file values.
        """
        self._config_var_name = config_var_name
        self._session = session

    def __deepcopy__(self, memo):
        # Only the variable name is copied; the session is shared.
        return ScopedConfigProvider(
            copy.deepcopy(self._config_var_name, memo), self._session
        )

    def provide(self):
        """Return the config-file value, or None when absent."""
        scoped = self._session.get_scoped_config()
        if not isinstance(self._config_var_name, tuple):
            return scoped.get(self._config_var_name)
        # Tuple form: (section, var) nested lookup; a non-dict section
        # (e.g. a scalar) yields None rather than an error.
        section_values = scoped.get(self._config_var_name[0])
        if isinstance(section_values, dict):
            return section_values.get(self._config_var_name[1])
        return None

    def __repr__(self):
        return f'ScopedConfigProvider(config_var_name={self._config_var_name}, session={self._session})'
+
+
class EnvironmentProvider(BaseProvider):
    """Reads a config value out of an environment-variable mapping."""

    def __init__(self, name, env):
        """Initialize with the key to check.

        :type name: str
        :param name: The environment variable to look up.

        :type env: dict
        :param env: The mapping (usually os.environ) to read from.
        """
        self._name = name
        self._env = env

    def __deepcopy__(self, memo):
        # Both the key and the environment mapping are copied.
        return EnvironmentProvider(
            copy.deepcopy(self._name, memo), copy.deepcopy(self._env, memo)
        )

    def provide(self):
        """Return the variable's value, or None when it is not set."""
        return self._env.get(self._name)

    def __repr__(self):
        return f'EnvironmentProvider(name={self._name}, env={self._env})'
+
+
class SectionConfigProvider(BaseProvider):
    """Provides a dictionary from a section in the scoped config

    This is useful for retrieving scoped config variables (i.e. s3) that have
    their own set of config variables and resolving logic.
    """

    def __init__(self, section_name, session, override_providers=None):
        self._section_name = section_name
        self._session = session
        # Delegate the raw section lookup to a ScopedConfigProvider.
        self._scoped_config_provider = ScopedConfigProvider(
            self._section_name, self._session
        )
        self._override_providers = (
            {} if override_providers is None else override_providers
        )

    def __deepcopy__(self, memo):
        # The session is shared; name and overrides are copied.
        return SectionConfigProvider(
            copy.deepcopy(self._section_name, memo),
            self._session,
            copy.deepcopy(self._override_providers, memo),
        )

    def provide(self):
        """Return the section as a dict, with override providers applied."""
        section = self._scoped_config_provider.provide()
        if section and not isinstance(section, dict):
            # A truthy scalar where a section was expected: log and bail.
            logger.debug(
                "The %s config key is not a dictionary type, "
                "ignoring its value of: %s",
                self._section_name,
                section,
            )
            return None
        for var_name, override in self._override_providers.items():
            override_value = override.provide()
            if override_value is None:
                continue
            if section is None:
                section = {}
            section[var_name] = override_value
        return section

    def set_default_provider(self, key, default_provider):
        # If an override chain already exists for this key, push the default
        # onto it; otherwise wrap any existing provider with the default.
        existing = self._override_providers.get(key)
        if isinstance(existing, ChainProvider):
            existing.set_default_provider(default_provider)
            return
        if isinstance(existing, BaseProvider):
            default_provider = ChainProvider(
                providers=[existing, default_provider]
            )
        self._override_providers[key] = default_provider

    def __repr__(self):
        return (
            f'SectionConfigProvider(section_name={self._section_name}, '
            f'session={self._session}, '
            f'override_providers={self._override_providers})'
        )
+
+
class ConstantProvider(BaseProvider):
    """Always provides the same fixed value."""

    def __init__(self, value):
        self._value = value

    def __deepcopy__(self, memo):
        # Copy the held value so mutations of the copy stay isolated.
        return ConstantProvider(copy.deepcopy(self._value, memo))

    def provide(self):
        """Return the constant supplied at construction time."""
        return self._value

    def __repr__(self):
        return f'ConstantProvider(value={self._value})'
+
+
class ConfiguredEndpointProvider(BaseProvider):
    """Lookup an endpoint URL from environment variable or shared config file.

    NOTE: This class is considered private and is subject to abrupt breaking
    changes or removal without prior announcement. Please do not use it
    directly.
    """

    # Resolution precedence: service-specific env var, global env var,
    # service entry in the config 'services' section, then the profile's
    # global 'endpoint_url' key. Each name maps to a
    # _get_endpoint_url_<name> method (dispatched in provide()).
    _ENDPOINT_URL_LOOKUP_ORDER = [
        'environment_service',
        'environment_global',
        'config_service',
        'config_global',
    ]

    def __init__(
        self,
        full_config,
        scoped_config,
        client_name,
        environ=None,
    ):
        """Initialize a ConfiguredEndpointProviderChain.

        :type full_config: dict
        :param full_config: This is the dict representing the full
            configuration file.

        :type scoped_config: dict
        :param scoped_config: This is the dict representing the configuration
            for the current profile for the session.

        :type client_name: str
        :param client_name: The name used to instantiate a client using
            botocore.session.Session.create_client.

        :type environ: dict
        :param environ: A mapping to use for environment variables. If this
            is not provided it will default to use os.environ.
        """
        self._full_config = full_config
        self._scoped_config = scoped_config
        self._client_name = client_name
        # Pre-compute the snake_case service id used both for the env-var
        # name and for the services-section key.
        self._transformed_service_id = self._get_snake_case_service_id(
            self._client_name
        )
        if environ is None:
            environ = os.environ
        self._environ = environ

    def provide(self):
        """Lookup the configured endpoint URL.

        The order is:

        1. The value provided by a service-specific environment variable.
        2. The value provided by the global endpoint environment variable
           (AWS_ENDPOINT_URL).
        3. The value provided by a service-specific parameter from a services
           definition section in the shared configuration file.
        4. The value provided by the global parameter from a services
           definition section in the shared configuration file.
        """
        for location in self._ENDPOINT_URL_LOOKUP_ORDER:
            logger.debug(
                'Looking for endpoint for %s via: %s',
                self._client_name,
                location,
            )

            # Dispatch to the _get_endpoint_url_<location> helper.
            endpoint_url = getattr(self, f'_get_endpoint_url_{location}')()

            if endpoint_url:
                logger.info(
                    'Found endpoint for %s via: %s.',
                    self._client_name,
                    location,
                )
                return endpoint_url

        logger.debug('No configured endpoint found.')
        return None

    def _get_snake_case_service_id(self, client_name):
        # Get the service ID without loading the service data file, accounting
        # for any aliases and standardizing the names with hyphens.
        client_name = utils.SERVICE_NAME_ALIASES.get(client_name, client_name)
        hyphenized_service_id = (
            utils.CLIENT_NAME_TO_HYPHENIZED_SERVICE_ID_OVERRIDES.get(
                client_name, client_name
            )
        )
        return hyphenized_service_id.replace('-', '_')

    def _get_service_env_var_name(self):
        # e.g. 's3' -> 'AWS_ENDPOINT_URL_S3'.
        transformed_service_id_env = self._transformed_service_id.upper()
        return f'AWS_ENDPOINT_URL_{transformed_service_id_env}'

    def _get_services_config(self):
        # Resolve the services section named by the profile's 'services'
        # key; referencing a nonexistent section is a configuration error.
        if 'services' not in self._scoped_config:
            return {}

        section_name = self._scoped_config['services']
        services_section = self._full_config.get('services', {}).get(
            section_name
        )

        if not services_section:
            error_msg = (
                f'The profile is configured to use the services '
                f'section but the "{section_name}" services '
                f'configuration does not exist.'
            )
            raise InvalidConfigError(error_msg=error_msg)

        return services_section

    def _get_endpoint_url_config_service(self):
        # Service-specific 'endpoint_url' inside the services section.
        snakecase_service_id = self._transformed_service_id.lower()
        return (
            self._get_services_config()
            .get(snakecase_service_id, {})
            .get('endpoint_url')
        )

    def _get_endpoint_url_config_global(self):
        # Profile-wide 'endpoint_url' fallback.
        return self._scoped_config.get('endpoint_url')

    def _get_endpoint_url_environment_service(self):
        # Service-specific environment variable, e.g. AWS_ENDPOINT_URL_S3.
        return EnvironmentProvider(
            name=self._get_service_env_var_name(), env=self._environ
        ).provide()

    def _get_endpoint_url_environment_global(self):
        # Global AWS_ENDPOINT_URL environment variable.
        return EnvironmentProvider(
            name='AWS_ENDPOINT_URL', env=self._environ
        ).provide()
diff --git a/py311/lib/python3.11/site-packages/botocore/context.py b/py311/lib/python3.11/site-packages/botocore/context.py
new file mode 100644
index 0000000000000000000000000000000000000000..8034747729c1a48acea02bd7b430544b5c98c812
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/context.py
@@ -0,0 +1,127 @@
+# Copyright 2025 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""
+NOTE: All classes and functions in this module are considered private and are
+subject to abrupt breaking changes. Please do not use them directly.
+"""
+
+from contextlib import contextmanager
+from contextvars import ContextVar
+from copy import deepcopy
+from dataclasses import dataclass, field
+from functools import wraps
+
+
@dataclass
class ClientContext:
    """
    Encapsulation of objects tracked within the ``_context`` context variable.

    ``features`` is a set responsible for storing features used during
    preparation of an AWS request. ``botocore.useragent.register_feature_id``
    is used to add to this set.
    """

    # Each instance gets its own set via default_factory, so feature ids
    # registered for one request never leak into another context.
    features: set[str] = field(default_factory=set)
+
+
# Holds the active ClientContext for the current execution context;
# unset until set_context() is first called.
_context = ContextVar("_context")


def get_context():
    """Return the current ``_context`` value, or None when unset."""
    return _context.get(None)


def set_context(ctx):
    """Install ``ctx`` as the current context variable.

    :type ctx: ClientContext
    :param ctx: Client context object to make current.

    :rtype: contextvars.Token
    :returns: Token that can later be handed to ``reset_context`` to
        restore the previous value.
    """
    return _context.set(ctx)


def reset_context(token):
    """Restore ``_context`` to the state captured by ``token``.

    :type token: contextvars.Token
    :param token: Token returned by a prior ``set_context`` call.
    """
    _context.reset(token)
+
+
@contextmanager
def start_as_current_context(ctx=None):
    """
    Install a copy of ``ctx`` (or of the current context) as the active
    context variable for the duration of the ``with`` block, creating a
    fresh ``ClientContext`` when neither exists. The previous value is
    always restored on exit.

    Example usage:

        def my_feature():
            with start_as_current_context():
                register_feature_id('MY_FEATURE')
                pass

    :type ctx: ClientContext
    :param ctx: The client context object to set as the new context variable.
        If not provided, the current or a new context variable is used.
    """
    base = ctx or get_context()
    # Copy so mutations inside the block never touch the caller's context.
    scoped = ClientContext() if base is None else deepcopy(base)
    token = set_context(scoped)
    try:
        yield
    finally:
        reset_context(token)
+
+
def with_current_context(hook=None):
    """
    Decorator form of ``start_as_current_context``. The wrapped function
    runs inside a freshly-set context; ``hook``, when given, is invoked
    first within that same context. This is just syntactic sugar to avoid
    indenting existing code under the context manager.

    Example usage:

        @with_current_context(partial(register_feature_id, 'MY_FEATURE'))
        def my_feature():
            pass

    :type hook: callable
    :param hook: A callable that will be invoked within the scope of the
        ``start_as_current_context`` context manager.
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with start_as_current_context():
                if hook:
                    hook()
                return func(*args, **kwargs)

        return wrapper

    return decorator
diff --git a/py311/lib/python3.11/site-packages/botocore/credentials.py b/py311/lib/python3.11/site-packages/botocore/credentials.py
new file mode 100644
index 0000000000000000000000000000000000000000..571dfeac6a5cd695d76394068a3c069d7863e547
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/credentials.py
@@ -0,0 +1,2781 @@
+# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import base64
+import datetime
+import getpass
+import json
+import logging
+import os
+import subprocess
+import threading
+import time
+import uuid
+from collections import namedtuple
+from copy import deepcopy
+from hashlib import sha1, sha256
+
+import dateutil.parser
+from dateutil.parser import parse
+from dateutil.tz import tzlocal, tzutc
+
+import botocore.compat
+import botocore.configloader
+from botocore import UNSIGNED
+from botocore.compat import (
+ EC,
+ compat_shell_split,
+ total_seconds,
+)
+from botocore.config import Config
+from botocore.exceptions import (
+ ConfigNotFound,
+ CredentialRetrievalError,
+ InfiniteLoopConfigError,
+ InvalidConfigError,
+ LoginError,
+ LoginInsufficientPermissions,
+ LoginRefreshRequired,
+ LoginTokenLoadError,
+ MetadataRetrievalError,
+ MissingDependencyException,
+ PartialCredentialsError,
+ RefreshWithMFAUnsupportedError,
+ UnauthorizedSSOTokenError,
+ UnknownCredentialError,
+)
+from botocore.tokens import SSOTokenProvider
+from botocore.useragent import register_feature_id, register_feature_ids
+from botocore.utils import (
+ ArnParser,
+ ContainerMetadataFetcher,
+ FileWebIdentityTokenLoader,
+ InstanceMetadataFetcher,
+ JSONFileCache,
+ LoginTokenLoader,
+ SSOTokenLoader,
+ create_nested_client,
+ get_login_token_cache_directory,
+ parse_key_val_file,
+ resolve_imds_endpoint_mode,
+)
+
logger = logging.getLogger(__name__)
# Immutable snapshot of a set of credentials; 'account_id' is optional and
# defaults to None for providers that do not resolve it.
ReadOnlyCredentials = namedtuple(
    'ReadOnlyCredentials',
    ['access_key', 'secret_key', 'token', 'account_id'],
    defaults=(None,),
)

# Refresh windows (seconds before expiry) used by RefreshableCredentials.
_DEFAULT_MANDATORY_REFRESH_TIMEOUT = 10 * 60  # 10 min
_DEFAULT_ADVISORY_REFRESH_TIMEOUT = 15 * 60  # 15 min
+
+
def create_credential_resolver(session, cache=None, region_name=None):
    """Create a default credential resolver.

    This creates a pre-configured credential resolver
    that includes the default lookup chain for
    credentials.

    :type session: botocore.session.Session
    :param session: Session used to read config variables and build clients.

    :type cache: dict
    :param cache: Optional shared credential cache; a new dict is used when
        not provided.

    :type region_name: str
    :param region_name: Region handed to providers that must create clients.
    """
    profile_name = session.get_config_variable('profile') or 'default'
    metadata_timeout = session.get_config_variable('metadata_service_timeout')
    num_attempts = session.get_config_variable('metadata_service_num_attempts')
    # A profile set programmatically (instance variable) disables the
    # environment-variable provider further below.
    disable_env_vars = session.instance_variables().get('profile') is not None

    imds_config = {
        'ec2_metadata_service_endpoint': session.get_config_variable(
            'ec2_metadata_service_endpoint'
        ),
        'ec2_metadata_service_endpoint_mode': resolve_imds_endpoint_mode(
            session
        ),
        'ec2_credential_refresh_window': _DEFAULT_ADVISORY_REFRESH_TIMEOUT,
        'ec2_metadata_v1_disabled': session.get_config_variable(
            'ec2_metadata_v1_disabled'
        ),
    }

    if cache is None:
        cache = {}

    env_provider = EnvProvider()
    container_provider = ContainerProvider()
    instance_metadata_provider = InstanceMetadataProvider(
        iam_role_fetcher=InstanceMetadataFetcher(
            timeout=metadata_timeout,
            num_attempts=num_attempts,
            user_agent=session.user_agent(),
            config=imds_config,
        )
    )

    profile_provider_builder = ProfileProviderBuilder(
        session, cache=cache, region_name=region_name
    )
    assume_role_provider = AssumeRoleProvider(
        load_config=lambda: session.full_config,
        client_creator=_get_client_creator(session, region_name),
        cache=cache,
        profile_name=profile_name,
        credential_sourcer=CanonicalNameCredentialSourcer(
            [env_provider, container_provider, instance_metadata_provider]
        ),
        profile_provider_builder=profile_provider_builder,
    )

    # Final chain: env vars, assume-role, profile-scoped providers, then
    # the legacy/instance fallbacks.
    pre_profile = [
        env_provider,
        assume_role_provider,
    ]
    profile_providers = profile_provider_builder.providers(
        profile_name=profile_name,
        disable_env_vars=disable_env_vars,
    )
    post_profile = [
        OriginalEC2Provider(),
        BotoProvider(),
        container_provider,
        instance_metadata_provider,
    ]
    providers = pre_profile + profile_providers + post_profile

    if disable_env_vars:
        # An explicitly provided profile will negate an EnvProvider.
        # We will defer to providers that understand the "profile"
        # concept to retrieve credentials.
        # The one edge case if is all three values are provided via
        # env vars:
        # export AWS_ACCESS_KEY_ID=foo
        # export AWS_SECRET_ACCESS_KEY=bar
        # export AWS_PROFILE=baz
        # Then, just like our client() calls, the explicit credentials
        # will take precedence.
        #
        # This precedence is enforced by leaving the EnvProvider in the chain.
        # This means that the only way a "profile" would win is if the
        # EnvProvider does not return credentials, which is what we want
        # in this scenario.
        providers.remove(env_provider)
        logger.debug(
            'Skipping environment variable credential check'
            ' because profile name was explicitly set.'
        )

    resolver = CredentialResolver(providers=providers)
    return resolver
+
+
class ProfileProviderBuilder:
    """Builds the ordered set of profile-based credential providers.

    NOTE: This class is only intended for internal use.

    The shared-config-driven providers are created here so that the default
    credential chain and the assume-role provider's source-profile chain can
    share a single construction path and provider ordering.
    """

    def __init__(
        self,
        session,
        cache=None,
        region_name=None,
        sso_token_cache=None,
        login_token_cache=None,
    ):
        self._session = session
        self._cache = cache
        self._region_name = region_name
        self._sso_token_cache = sso_token_cache
        self._login_token_cache = login_token_cache

    def providers(self, profile_name, disable_env_vars=False):
        # Order matters: it defines lookup precedence within a profile.
        return [
            self._create_web_identity_provider(
                profile_name,
                disable_env_vars,
            ),
            self._create_sso_provider(profile_name),
            self._create_shared_credential_provider(profile_name),
            self._create_login_provider(profile_name),
            self._create_process_provider(profile_name),
            self._create_config_provider(profile_name),
        ]

    def _create_process_provider(self, profile_name):
        # credential_process entries come from the full config file.
        return ProcessProvider(
            profile_name=profile_name,
            load_config=lambda: self._session.full_config,
        )

    def _create_shared_credential_provider(self, profile_name):
        creds_path = self._session.get_config_variable('credentials_file')
        return SharedCredentialProvider(
            profile_name=profile_name,
            creds_filename=creds_path,
        )

    def _create_config_provider(self, profile_name):
        config_path = self._session.get_config_variable('config_file')
        return ConfigProvider(
            profile_name=profile_name,
            config_filename=config_path,
        )

    def _create_web_identity_provider(self, profile_name, disable_env_vars):
        return AssumeRoleWithWebIdentityProvider(
            load_config=lambda: self._session.full_config,
            client_creator=_get_client_creator(
                self._session, self._region_name
            ),
            cache=self._cache,
            profile_name=profile_name,
            disable_env_vars=disable_env_vars,
        )

    def _create_sso_provider(self, profile_name):
        return SSOProvider(
            load_config=lambda: self._session.full_config,
            client_creator=self._session.create_client,
            profile_name=profile_name,
            cache=self._cache,
            token_cache=self._sso_token_cache,
            token_provider=SSOTokenProvider(
                self._session,
                cache=self._sso_token_cache,
                profile_name=profile_name,
            ),
        )

    def _create_login_provider(self, profile_name):
        return LoginProvider(
            load_config=lambda: self._session.full_config,
            client_creator=self._session.create_client,
            profile_name=profile_name,
            token_cache=self._login_token_cache,
        )
+
+
def get_credentials(session):
    """Resolve credentials for ``session`` via the default provider chain."""
    return create_credential_resolver(session).load_credentials()
+
+
def _local_now():
    """Return the current time as an aware datetime in the local timezone."""
    return datetime.datetime.now(tz=tzlocal())
+
+
+def _parse_if_needed(value):
+ if isinstance(value, datetime.datetime):
+ return value
+ return parse(value)
+
+
+def _serialize_if_needed(value, iso=False):
+ if isinstance(value, datetime.datetime):
+ if iso:
+ return value.isoformat()
+ return value.strftime('%Y-%m-%dT%H:%M:%S%Z')
+ return value
+
+
+def _get_client_creator(session, region_name):
+ def client_creator(service_name, **kwargs):
+ create_client_kwargs = {'region_name': region_name}
+ create_client_kwargs.update(**kwargs)
+ return create_nested_client(
+ session, service_name, **create_client_kwargs
+ )
+
+ return client_creator
+
+
def create_assume_role_refresher(client, params):
    """Return a refresh callable that invokes STS AssumeRole with ``params``.

    The STS response keys are renamed to the lowercase names expected by
    the refreshable-credentials machinery.
    """

    def refresh():
        creds = client.assume_role(**params)['Credentials']
        return {
            'access_key': creds['AccessKeyId'],
            'secret_key': creds['SecretAccessKey'],
            'token': creds['SessionToken'],
            'expiry_time': _serialize_if_needed(creds['Expiration']),
        }

    return refresh
+
+
def create_mfa_serial_refresher(actual_refresh):
    """Wrap ``actual_refresh`` so it can only run once.

    MFA-based credentials cannot be silently re-acquired (the user would
    have to be re-prompted), so a second refresh attempt raises
    RefreshWithMFAUnsupportedError instead of re-invoking the callable.
    """

    class _OneShotRefresher:
        def __init__(self, refresh):
            self._refresh = refresh
            self._used = False

        def __call__(self):
            if self._used:
                # Reprompting for MFA could be supported someday; for now
                # we fail loudly once the temporary credentials expire.
                raise RefreshWithMFAUnsupportedError()
            self._used = True
            return self._refresh()

    return _OneShotRefresher(actual_refresh)
+
+
class Credentials:
    """
    Holds the credentials needed to authenticate requests.

    :param str access_key: The access key part of the credentials.
    :param str secret_key: The secret key part of the credentials.
    :param str token: The security token, valid only for session credentials.
    :param str method: A string which identifies where the credentials
        were found.
    :param str account_id: (optional) An account ID associated with the
        credentials.
    """

    def __init__(
        self, access_key, secret_key, token=None, method=None, account_id=None
    ):
        self.access_key = access_key
        self.secret_key = secret_key
        self.token = token
        # 'explicit' marks credentials handed to us directly rather than
        # discovered by a provider.
        self.method = 'explicit' if method is None else method
        self.account_id = account_id
        self._normalize()

    def _normalize(self):
        # Coerce both keys to unicode; stray byte strings (or non-ascii
        # bytes) would otherwise surface later as confusing decode errors.
        # The service ultimately decides whether to accept the credential.
        self.access_key = botocore.compat.ensure_unicode(self.access_key)
        self.secret_key = botocore.compat.ensure_unicode(self.secret_key)

    def get_frozen_credentials(self):
        """Return an immutable ReadOnlyCredentials snapshot."""
        return ReadOnlyCredentials(
            self.access_key, self.secret_key, self.token, self.account_id
        )

    def get_deferred_property(self, property_name):
        """Return a zero-arg callable that reads ``property_name`` lazily."""

        def get_property():
            return getattr(self, property_name, None)

        return get_property
+
+
+class RefreshableCredentials(Credentials):
+ """
+ Holds the credentials needed to authenticate requests. In addition, it
+ knows how to refresh itself.
+
+ :param str access_key: The access key part of the credentials.
+ :param str secret_key: The secret key part of the credentials.
+ :param str token: The security token, valid only for session credentials.
+ :param datetime expiry_time: The expiration time of the credentials.
+ :param function refresh_using: Callback function to refresh the credentials.
+ :param str method: A string which identifies where the credentials
+ were found.
+ :param function time_fetcher: Callback function to retrieve current time.
+ """
+
+ # The time at which we'll attempt to refresh, but not
+ # block if someone else is refreshing.
+ _advisory_refresh_timeout = _DEFAULT_ADVISORY_REFRESH_TIMEOUT
+ # The time at which all threads will block waiting for
+ # refreshed credentials.
+ _mandatory_refresh_timeout = _DEFAULT_MANDATORY_REFRESH_TIMEOUT
+
    def __init__(
        self,
        access_key,
        secret_key,
        token,
        expiry_time,
        refresh_using,
        method,
        time_fetcher=_local_now,
        advisory_timeout=None,
        mandatory_timeout=None,
        account_id=None,
    ):
        self._refresh_using = refresh_using
        self._access_key = access_key
        self._secret_key = secret_key
        self._token = token
        self._account_id = account_id
        self._expiry_time = expiry_time
        self._time_fetcher = time_fetcher
        # Serializes refresh attempts across threads; see _refresh().
        self._refresh_lock = threading.Lock()
        self.method = method
        # Cached immutable snapshot handed out by get_frozen_credentials();
        # replaced wholesale after each successful refresh.
        self._frozen_credentials = ReadOnlyCredentials(
            access_key, secret_key, token, account_id
        )
        self._normalize()
        # Per-instance overrides of the class-level refresh windows.
        if advisory_timeout is not None:
            self._advisory_refresh_timeout = advisory_timeout
        if mandatory_timeout is not None:
            self._mandatory_refresh_timeout = mandatory_timeout
+
    def _normalize(self):
        # Coerce both keys to unicode strings, mirroring
        # Credentials._normalize for the refreshable variant.
        self._access_key = botocore.compat.ensure_unicode(self._access_key)
        self._secret_key = botocore.compat.ensure_unicode(self._secret_key)
+
+ @classmethod
+ def create_from_metadata(
+ cls,
+ metadata,
+ refresh_using,
+ method,
+ advisory_timeout=None,
+ mandatory_timeout=None,
+ ):
+ kwargs = {}
+ if advisory_timeout is not None:
+ kwargs['advisory_timeout'] = advisory_timeout
+ if mandatory_timeout is not None:
+ kwargs['mandatory_timeout'] = mandatory_timeout
+
+ instance = cls(
+ access_key=metadata['access_key'],
+ secret_key=metadata['secret_key'],
+ token=metadata['token'],
+ expiry_time=cls._expiry_datetime(metadata['expiry_time']),
+ method=method,
+ refresh_using=refresh_using,
+ account_id=metadata.get('account_id'),
+ **kwargs,
+ )
+ return instance
+
    @property
    def access_key(self):
        """Warning: Using this property can lead to race conditions if you
        access another property subsequently along the refresh boundary.
        Please use get_frozen_credentials instead.
        """
        # Reading triggers an advisory refresh check first.
        self._refresh()
        return self._access_key

    @access_key.setter
    def access_key(self, value):
        # Plain assignment; used by _set_from_data() after a refresh.
        self._access_key = value

    @property
    def secret_key(self):
        """Warning: Using this property can lead to race conditions if you
        access another property subsequently along the refresh boundary.
        Please use get_frozen_credentials instead.
        """
        self._refresh()
        return self._secret_key

    @secret_key.setter
    def secret_key(self, value):
        self._secret_key = value

    @property
    def token(self):
        """Warning: Using this property can lead to race conditions if you
        access another property subsequently along the refresh boundary.
        Please use get_frozen_credentials instead.
        """
        self._refresh()
        return self._token

    @token.setter
    def token(self, value):
        self._token = value

    @property
    def account_id(self):
        """Warning: Using this property can lead to race conditions if you
        access another property subsequently along the refresh boundary.
        Please use get_frozen_credentials instead.
        """
        self._refresh()
        return self._account_id

    @account_id.setter
    def account_id(self, value):
        self._account_id = value
+
+ def _seconds_remaining(self):
+ delta = self._expiry_time - self._time_fetcher()
+ return total_seconds(delta)
+
+ def refresh_needed(self, refresh_in=None):
+ """Check if a refresh is needed.
+
+ A refresh is needed if the expiry time associated
+ with the temporary credentials is less than the
+ provided ``refresh_in``. If ``time_delta`` is not
+ provided, ``self.advisory_refresh_needed`` will be used.
+
+ For example, if your temporary credentials expire
+ in 10 minutes and the provided ``refresh_in`` is
+ ``15 * 60``, then this function will return ``True``.
+
+ :type refresh_in: int
+ :param refresh_in: The number of seconds before the
+ credentials expire in which refresh attempts should
+ be made.
+
+ :return: True if refresh needed, False otherwise.
+
+ """
+ if self._expiry_time is None:
+ # No expiration, so assume we don't need to refresh.
+ return False
+
+ if refresh_in is None:
+ refresh_in = self._advisory_refresh_timeout
+ # The credentials should be refreshed if they're going to expire
+ # in less than 5 minutes.
+ if self._seconds_remaining() >= refresh_in:
+ # There's enough time left. Don't refresh.
+ return False
+ logger.debug("Credentials need to be refreshed.")
+ return True
+
    def _is_expired(self):
        # Checks if the current credentials are expired.
        # A zero-second window means "already past the actual expiry time".
        return self.refresh_needed(refresh_in=0)
+
    def _refresh(self):
        # In the common case where we don't need a refresh, we
        # can immediately exit and not require acquiring the
        # refresh lock.
        if not self.refresh_needed(self._advisory_refresh_timeout):
            return

        # acquire() doesn't accept kwargs, but False is indicating
        # that we should not block if we can't acquire the lock.
        # If we aren't able to acquire the lock, we'll trigger
        # the else clause.
        if self._refresh_lock.acquire(False):
            try:
                # Re-check under the lock: another thread may have
                # refreshed while we were acquiring.
                if not self.refresh_needed(self._advisory_refresh_timeout):
                    return
                is_mandatory_refresh = self.refresh_needed(
                    self._mandatory_refresh_timeout
                )
                self._protected_refresh(is_mandatory=is_mandatory_refresh)
                return
            finally:
                self._refresh_lock.release()
        elif self.refresh_needed(self._mandatory_refresh_timeout):
            # If we're within the mandatory refresh window,
            # we must block until we get refreshed credentials.
            with self._refresh_lock:
                # Another thread may have refreshed while we blocked.
                if not self.refresh_needed(self._mandatory_refresh_timeout):
                    return
                self._protected_refresh(is_mandatory=True)
+
    def _protected_refresh(self, is_mandatory):
        # precondition: this method should only be called if you've acquired
        # the self._refresh_lock.
        try:
            metadata = self._refresh_using()
        except Exception:
            period_name = 'mandatory' if is_mandatory else 'advisory'
            logger.warning(
                "Refreshing temporary credentials failed "
                "during %s refresh period.",
                period_name,
                exc_info=True,
            )
            if is_mandatory:
                # If this is a mandatory refresh, then
                # all errors that occur when we attempt to refresh
                # credentials are propagated back to the user.
                raise
            # Otherwise we'll just return.
            # The end result will be that we'll use the current
            # set of temporary credentials we have.
            return
        self._set_from_data(metadata)
        # Refresh the frozen snapshot so get_frozen_credentials() observers
        # see the new values atomically.
        self._frozen_credentials = ReadOnlyCredentials(
            self._access_key, self._secret_key, self._token, self._account_id
        )
        if self._is_expired():
            # We successfully refreshed credentials but for whatever
            # reason, our refreshing function returned credentials
            # that are still expired. In this scenario, the only
            # thing we can do is let the user know and raise
            # an exception.
            msg = (
                "Credentials were refreshed, but the "
                "refreshed credentials are still expired."
            )
            logger.warning(msg)
            raise RuntimeError(msg)
+
+ @staticmethod
+ def _expiry_datetime(time_str):
+ return parse(time_str)
+
+ def _set_from_data(self, data):
+ expected_keys = ['access_key', 'secret_key', 'token', 'expiry_time']
+ if not data:
+ missing_keys = expected_keys
+ else:
+ missing_keys = [k for k in expected_keys if k not in data]
+
+ if missing_keys:
+ message = "Credential refresh failed, response did not contain: %s"
+ raise CredentialRetrievalError(
+ provider=self.method,
+ error_msg=message % ', '.join(missing_keys),
+ )
+
+ self.access_key = data['access_key']
+ self.secret_key = data['secret_key']
+ self.token = data['token']
+ self._expiry_time = parse(data['expiry_time'])
+ self.account_id = data.get('account_id')
+ logger.debug(
+ "Retrieved credentials will expire at: %s", self._expiry_time
+ )
+ self._normalize()
+
+ def get_frozen_credentials(self):
+ """Return immutable credentials.
+
+ The ``access_key``, ``secret_key``, and ``token`` properties
+ on this class will always check and refresh credentials if
+ needed before returning the particular credentials.
+
+ This has an edge case where you can get inconsistent
+ credentials. Imagine this:
+
+ # Current creds are "t1"
+ tmp.access_key ---> expired? no, so return t1.access_key
+ # ---- time is now expired, creds need refreshing to "t2" ----
+ tmp.secret_key ---> expired? yes, refresh and return t2.secret_key
+
+ This means we're using the access key from t1 with the secret key
+ from t2. To fix this issue, you can request a frozen credential object
+ which is guaranteed not to change.
+
+ The frozen credentials returned from this method should be used
+ immediately and then discarded. The typical usage pattern would
+ be::
+
+ creds = RefreshableCredentials(...)
+ some_code = SomeSignerObject()
+ # I'm about to sign the request.
+ # The frozen credentials are only used for the
+ # duration of generate_presigned_url and will be
+ # immediately thrown away.
+ request = some_code.sign_some_request(
+ with_credentials=creds.get_frozen_credentials())
+ print("Signed request:", request)
+
+ """
+ self._refresh()
+ return self._frozen_credentials
+
+
class DeferredRefreshableCredentials(RefreshableCredentials):
    """Refreshable credentials that can start out with no credentials.

    Unlike the parent class, no initial key material is required;
    ``refresh_using`` is invoked on first access.
    """

    def __init__(self, refresh_using, method, time_fetcher=_local_now):
        # Deliberately does not call super().__init__(): every credential
        # field starts empty and is populated by the first refresh.
        self.method = method
        self._refresh_using = refresh_using
        self._time_fetcher = time_fetcher
        self._refresh_lock = threading.Lock()
        self._access_key = None
        self._secret_key = None
        self._token = None
        self._account_id = None
        self._expiry_time = None
        self._frozen_credentials = None

    def refresh_needed(self, refresh_in=None):
        """Force a refresh until the first successful fetch has happened."""
        if self._frozen_credentials is not None:
            return super().refresh_needed(refresh_in)
        return True
+
+
class CachedCredentialFetcher:
    """Base class for credential fetchers backed by a cache.

    Subclasses implement ``_create_cache_key()`` and
    ``_get_credentials()``; this class handles cache lookup, expiry
    checking, and normalizing the response into a credentials dict.
    """

    DEFAULT_EXPIRY_WINDOW_SECONDS = 60 * 15

    def __init__(self, cache=None, expiry_window_seconds=None):
        self._cache = {} if cache is None else cache
        self._cache_key = self._create_cache_key()
        if expiry_window_seconds is None:
            expiry_window_seconds = self.DEFAULT_EXPIRY_WINDOW_SECONDS
        self._expiry_window_seconds = expiry_window_seconds
        self.feature_ids = set()

    def _create_cache_key(self):
        """Return a file-safe key identifying this fetcher's configuration."""
        raise NotImplementedError('_create_cache_key()')

    def _make_file_safe(self, filename):
        # Replace ':', the platform path separator, and '/' so the
        # string can be used as a file name.
        sanitized = filename.replace(':', '_').replace(os.sep, '_')
        return sanitized.replace('/', '_')

    def _get_credentials(self):
        """Fetch fresh credentials; implemented by subclasses."""
        raise NotImplementedError('_get_credentials()')

    def fetch_credentials(self):
        return self._get_cached_credentials()

    def _get_cached_credentials(self):
        """Return up-to-date credentials, from cache when possible.

        Falls back to ``_get_credentials()`` (e.g. an assume-role call)
        when no unexpired cached response is available.
        """
        response = self._load_from_cache()
        if response is None:
            response = self._get_credentials()
            self._write_to_cache(response)
        else:
            logger.debug("Credentials for role retrieved from cache.")

        creds = response['Credentials']
        return {
            'access_key': creds['AccessKeyId'],
            'secret_key': creds['SecretAccessKey'],
            'token': creds['SessionToken'],
            'expiry_time': _serialize_if_needed(creds['Expiration'], iso=True),
            'account_id': creds.get('AccountId'),
        }

    def _load_from_cache(self):
        """Return a cached, unexpired response, or None."""
        if self._cache_key not in self._cache:
            return None
        creds = deepcopy(self._cache[self._cache_key])
        if self._is_expired(creds):
            logger.debug(
                "Credentials were found in cache, but they are expired."
            )
            return None
        return creds

    def _write_to_cache(self, response):
        # Deep-copy so later mutation of the response can't corrupt the cache.
        self._cache[self._cache_key] = deepcopy(response)

    def _is_expired(self, credentials):
        """Treat credentials inside the expiry window as already expired."""
        end_time = _parse_if_needed(credentials['Credentials']['Expiration'])
        remaining = total_seconds(end_time - _local_now())
        return remaining < self._expiry_window_seconds
+
+
class BaseAssumeRoleCredentialFetcher(CachedCredentialFetcher):
    """Shared plumbing for the AssumeRole-style credential fetchers."""

    def __init__(
        self,
        client_creator,
        role_arn,
        extra_args=None,
        cache=None,
        expiry_window_seconds=None,
    ):
        self._client_creator = client_creator
        self._role_arn = role_arn

        self._assume_kwargs = (
            {} if extra_args is None else deepcopy(extra_args)
        )
        self._assume_kwargs['RoleArn'] = self._role_arn

        self._role_session_name = self._assume_kwargs.get('RoleSessionName')
        self._using_default_session_name = False
        if not self._role_session_name:
            self._generate_assume_role_name()

        super().__init__(cache, expiry_window_seconds)

    def _generate_assume_role_name(self):
        # No session name supplied by the caller; synthesize one.  The
        # timestamp keeps it unique enough while staying readable.
        self._role_session_name = f'botocore-session-{int(time.time())}'
        self._assume_kwargs['RoleSessionName'] = self._role_session_name
        self._using_default_session_name = True

    def _create_cache_key(self):
        """Hash the assume-role arguments into a file-safe cache key."""
        args = deepcopy(self._assume_kwargs)

        # A generated session name is effectively random per run, so
        # including it in the hash would defeat the cache.
        if self._using_default_session_name:
            del args['RoleSessionName']

        if 'Policy' in args:
            # Decode the policy document so json.dumps(sort_keys=True)
            # below can order its keys deterministically.
            args['Policy'] = json.loads(args['Policy'])

        serialized = json.dumps(args, sort_keys=True)
        digest = sha1(serialized.encode('utf-8')).hexdigest()
        return self._make_file_safe(digest)

    def _add_account_id_to_response(self, response):
        """Derive Credentials.AccountId from the assumed-role ARN, if parseable."""
        role_arn = response.get('AssumedRoleUser', {}).get('Arn')
        if ArnParser.is_arn(role_arn):
            account_id = ArnParser().parse_arn(role_arn)['account']
            response['Credentials']['AccountId'] = account_id
        else:
            logger.debug("Unable to extract account ID from Arn: %s", role_arn)
+
+
class AssumeRoleCredentialFetcher(BaseAssumeRoleCredentialFetcher):
    def __init__(
        self,
        client_creator,
        source_credentials,
        role_arn,
        extra_args=None,
        mfa_prompter=None,
        cache=None,
        expiry_window_seconds=None,
    ):
        """
        :type client_creator: callable
        :param client_creator: A callable that creates a client taking
            arguments like ``Session.create_client``.

        :type source_credentials: Credentials
        :param source_credentials: The credentials to use to create the
            client for the call to AssumeRole.

        :type role_arn: str
        :param role_arn: The ARN of the role to be assumed.

        :type extra_args: dict
        :param extra_args: Any additional arguments to add to the assume
            role request using the format of the botocore operation.
            Possible keys include, but may not be limited to,
            DurationSeconds, Policy, SerialNumber, ExternalId and
            RoleSessionName.

        :type mfa_prompter: callable
        :param mfa_prompter: A callable that returns input provided by the
            user (i.e raw_input, getpass.getpass, etc.).

        :type cache: dict
        :param cache: An object that supports ``__getitem__``,
            ``__setitem__``, and ``__contains__``. An example of this is
            the ``JSONFileCache`` class in aws-cli.

        :type expiry_window_seconds: int
        :param expiry_window_seconds: The amount of time, in seconds,
            before the credentials' stated expiration at which they are
            considered expired and eligible for refresh.
        """
        self._source_credentials = source_credentials
        self._mfa_prompter = mfa_prompter
        if self._mfa_prompter is None:
            self._mfa_prompter = getpass.getpass

        super().__init__(
            client_creator,
            role_arn,
            extra_args=extra_args,
            cache=cache,
            expiry_window_seconds=expiry_window_seconds,
        )

    def _get_credentials(self):
        """Get credentials by calling assume role."""
        register_feature_ids(self.feature_ids)
        kwargs = self._assume_role_kwargs()
        client = self._create_client()
        response = client.assume_role(**kwargs)
        self._add_account_id_to_response(response)
        return response

    def _assume_role_kwargs(self):
        """Get the arguments for assume role based on current configuration.

        Prompts for a fresh MFA token code when an MFA serial is
        configured; all other arguments pass through unchanged.
        """
        assume_role_kwargs = deepcopy(self._assume_kwargs)

        mfa_serial = assume_role_kwargs.get('SerialNumber')

        if mfa_serial is not None:
            # MFA codes are single-use, so prompt for a fresh one on
            # every call rather than caching it in _assume_kwargs.
            prompt = f'Enter MFA code for {mfa_serial}: '
            token_code = self._mfa_prompter(prompt)
            assume_role_kwargs['TokenCode'] = token_code

        # NOTE: a previous revision re-assigned DurationSeconds to its
        # own value here; that dead code has been removed.

        return assume_role_kwargs

    def _create_client(self):
        """Create an STS client using the source credentials."""
        frozen_credentials = self._source_credentials.get_frozen_credentials()
        return self._client_creator(
            'sts',
            aws_access_key_id=frozen_credentials.access_key,
            aws_secret_access_key=frozen_credentials.secret_key,
            aws_session_token=frozen_credentials.token,
        )
+
+
class AssumeRoleWithWebIdentityCredentialFetcher(
    BaseAssumeRoleCredentialFetcher
):
    def __init__(
        self,
        client_creator,
        web_identity_token_loader,
        role_arn,
        extra_args=None,
        cache=None,
        expiry_window_seconds=None,
    ):
        """Fetch credentials via sts:AssumeRoleWithWebIdentity.

        :type client_creator: callable
        :param client_creator: A callable that creates a client taking
            arguments like ``Session.create_client``.

        :type web_identity_token_loader: callable
        :param web_identity_token_loader: A callable that takes no
            arguments and returns a web identity token str.

        :type role_arn: str
        :param role_arn: The ARN of the role to be assumed.

        :type extra_args: dict
        :param extra_args: Any additional arguments to add to the assume
            role request, using the names of the botocore operation's
            parameters (e.g. DurationSeconds, Policy, RoleSessionName).

        :type cache: dict
        :param cache: An object that supports ``__getitem__``,
            ``__setitem__``, and ``__contains__``, e.g. the
            ``JSONFileCache`` class in aws-cli.

        :type expiry_window_seconds: int
        :param expiry_window_seconds: The amount of time, in seconds,
            before the stated expiry at which the cached credentials are
            treated as expired.
        """
        self._web_identity_token_loader = web_identity_token_loader

        super().__init__(
            client_creator,
            role_arn,
            extra_args=extra_args,
            cache=cache,
            expiry_window_seconds=expiry_window_seconds,
        )

    def _get_credentials(self):
        """Call AssumeRoleWithWebIdentity and return the raw response."""
        register_feature_ids(self.feature_ids)
        kwargs = self._assume_role_kwargs()
        # This operation authenticates via the web identity token alone,
        # so build an STS client that does not sign requests.
        sts = self._client_creator(
            'sts', config=Config(signature_version=UNSIGNED)
        )
        response = sts.assume_role_with_web_identity(**kwargs)
        self._add_account_id_to_response(response)
        return response

    def _assume_role_kwargs(self):
        """Return the assume-role arguments with the identity token added."""
        kwargs = deepcopy(self._assume_kwargs)
        kwargs['WebIdentityToken'] = self._web_identity_token_loader()
        return kwargs
+
+
class CredentialProvider:
    # A short name to identify the provider within botocore.
    METHOD = None

    # A name to identify the provider for use in cross-sdk features like
    # assume role's `credential_source` configuration option. These names
    # are to be treated in a case-insensitive way. NOTE: any providers not
    # implemented in botocore MUST prefix their canonical names with
    # 'custom' or we DO NOT guarantee that it will work with any features
    # that this provides.
    CANONICAL_NAME = None

    def __init__(self, session=None):
        self.session = session

    def load(self):
        """Load credentials from this provider's source.

        Subclasses override this (reading from disk, the environment,
        the network, or wherever) and return a truthy value when
        credentials were found and set.  A falsy return tells the
        ``CredentialResolver`` to fall back to the next available
        provider.  This default implementation does nothing, assuming
        the user set ``access_key/secret_key/token`` themselves.

        :returns: Whether credentials were found & set
        :rtype: Credentials
        """
        return True

    def _extract_creds_from_mapping(self, mapping, *key_names):
        """Return the values for ``key_names`` from ``mapping``, in order.

        :raises PartialCredentialsError: if any requested key is missing.
        """
        found = []
        for key_name in key_names:
            try:
                value = mapping[key_name]
            except KeyError:
                raise PartialCredentialsError(
                    provider=self.METHOD, cred_var=key_name
                )
            found.append(value)
        return found
+
+
class ProcessProvider(CredentialProvider):
    """Fetch credentials by running a user-configured external command."""

    METHOD = 'custom-process'

    def __init__(self, profile_name, load_config, popen=subprocess.Popen):
        self._profile_name = profile_name
        self._load_config = load_config
        self._loaded_config = None
        self._popen = popen

    def load(self):
        """Run the profile's credential_process, if configured.

        :returns: RefreshableCredentials when the process reports an
            expiry time, plain Credentials otherwise, or None when no
            credential_process is configured.
        """
        credential_process = self._credential_process
        if credential_process is None:
            return

        register_feature_id('CREDENTIALS_PROFILE_PROCESS')
        creds_dict = self._retrieve_credentials_using(credential_process)
        register_feature_id('CREDENTIALS_PROCESS')
        if creds_dict.get('expiry_time') is None:
            return Credentials(
                access_key=creds_dict['access_key'],
                secret_key=creds_dict['secret_key'],
                token=creds_dict.get('token'),
                method=self.METHOD,
                account_id=creds_dict.get('account_id'),
            )
        # An expiry time means the process can be re-run to refresh.
        return RefreshableCredentials.create_from_metadata(
            creds_dict,
            lambda: self._retrieve_credentials_using(credential_process),
            self.METHOD,
        )

    def _retrieve_credentials_using(self, credential_process):
        """Execute the process and translate its JSON output.

        :raises CredentialRetrievalError: on a non-zero exit status, an
            unsupported output version, or missing required keys.
        """
        # shell=True is deliberately avoided; the configured command
        # string is split into an argv list and executed directly.
        argv = compat_shell_split(credential_process)
        process = self._popen(
            argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        stdout, stderr = process.communicate()
        if process.returncode != 0:
            raise CredentialRetrievalError(
                provider=self.METHOD, error_msg=stderr.decode('utf-8')
            )
        parsed = botocore.compat.json.loads(stdout.decode('utf-8'))
        version = parsed.get('Version', '')
        if version != 1:
            raise CredentialRetrievalError(
                provider=self.METHOD,
                error_msg=(
                    f"Unsupported version '{version}' for credential process "
                    f"provider, supported versions: 1"
                ),
            )
        try:
            return {
                'access_key': parsed['AccessKeyId'],
                'secret_key': parsed['SecretAccessKey'],
                'token': parsed.get('SessionToken'),
                'expiry_time': parsed.get('Expiration'),
                'account_id': self._get_account_id(parsed),
            }
        except KeyError as e:
            raise CredentialRetrievalError(
                provider=self.METHOD,
                error_msg=f"Missing required key in response: {e}",
            )

    @property
    def _credential_process(self):
        # The command configured via the profile's ``credential_process``
        # setting, or None when unset.
        return self.profile_config.get('credential_process')

    @property
    def profile_config(self):
        # Load the session config lazily so instantiation stays cheap.
        if self._loaded_config is None:
            self._loaded_config = self._load_config()
        profiles = self._loaded_config.get('profiles', {})
        return profiles.get(self._profile_name, {})

    def _get_account_id(self, parsed):
        # Prefer the account id reported by the process itself, falling
        # back to the profile's aws_account_id setting.
        return parsed.get('AccountId') or self.profile_config.get(
            'aws_account_id'
        )
+
+
class InstanceMetadataProvider(CredentialProvider):
    METHOD = 'iam-role'
    CANONICAL_NAME = 'Ec2InstanceMetadata'

    def __init__(self, iam_role_fetcher):
        self._role_fetcher = iam_role_fetcher

    def load(self):
        """Fetch credentials from the EC2 instance metadata service.

        :returns: RefreshableCredentials when an IAM role is attached to
            the instance, otherwise None so the resolver moves on.
        """
        fetcher = self._role_fetcher
        # Probe the metadata service once; an empty response means there
        # is nothing useful here and the next provider in the chain runs.
        metadata = fetcher.retrieve_iam_role_credentials()
        if not metadata:
            return None
        register_feature_id('CREDENTIALS_IMDS')
        logger.info(
            'Found credentials from IAM Role: %s', metadata['role_name']
        )
        # Seed the credentials with the response we already have; they
        # will refresh themselves via the fetcher once the expiry is hit.
        return RefreshableCredentials.create_from_metadata(
            metadata,
            method=self.METHOD,
            refresh_using=fetcher.retrieve_iam_role_credentials,
        )
+
+
class EnvProvider(CredentialProvider):
    METHOD = 'env'
    CANONICAL_NAME = 'Environment'
    ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
    SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
    # The token can come from either of these env var.
    # AWS_SESSION_TOKEN is what other AWS SDKs have standardized on.
    TOKENS = ['AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN']
    EXPIRY_TIME = 'AWS_CREDENTIAL_EXPIRATION'
    ACCOUNT_ID = 'AWS_ACCOUNT_ID'

    def __init__(self, environ=None, mapping=None):
        """
        :param environ: The environment variables (defaults to
            ``os.environ`` if no value is provided).
        :param mapping: An optional mapping of variable names to
            environment variable names, overriding the default
            access_key->AWS_ACCESS_KEY_ID mapping, etc.  Recognized
            keys: ``access_key``, ``secret_key``, ``token``,
            ``expiry_time`` and ``account_id``.
        """
        self.environ = os.environ if environ is None else environ
        self._mapping = self._build_mapping(mapping)

    def _build_mapping(self, mapping):
        # Resolve variable-name -> env-var-name, falling back to the
        # class-level defaults for anything not overridden.
        if mapping is None:
            mapping = {}
        var_mapping = {
            'access_key': mapping.get('access_key', self.ACCESS_KEY),
            'secret_key': mapping.get('secret_key', self.SECRET_KEY),
            'token': mapping.get('token', self.TOKENS),
            'expiry_time': mapping.get('expiry_time', self.EXPIRY_TIME),
            'account_id': mapping.get('account_id', self.ACCOUNT_ID),
        }
        # The token entry is always handled as a list of candidate vars.
        if not isinstance(var_mapping['token'], list):
            var_mapping['token'] = [var_mapping['token']]
        return var_mapping

    def load(self):
        """
        Search for credentials in explicit environment variables.
        """
        access_key = self.environ.get(self._mapping['access_key'], '')
        if not access_key:
            return None

        logger.info('Found credentials in environment variables.')
        fetcher = self._create_credentials_fetcher()
        credentials = fetcher(require_expiry=False)
        register_feature_id('CREDENTIALS_ENV_VARS')

        expiry_time = credentials['expiry_time']
        if expiry_time is None:
            return Credentials(
                credentials['access_key'],
                credentials['secret_key'],
                credentials['token'],
                method=self.METHOD,
                account_id=credentials['account_id'],
            )

        # An expiration time was supplied, so wrap the credentials in a
        # refreshable object that re-reads the environment when needed.
        return RefreshableCredentials(
            credentials['access_key'],
            credentials['secret_key'],
            credentials['token'],
            parse(expiry_time),
            refresh_using=fetcher,
            method=self.METHOD,
            account_id=credentials['account_id'],
        )

    def _create_credentials_fetcher(self):
        # Bind the pieces the closure needs as locals.
        mapping = self._mapping
        method = self.METHOD
        environ = self.environ

        def fetch_credentials(require_expiry=True):
            """Re-read the mapped environment variables.

            :raises PartialCredentialsError: when the access key, secret
                key, or (if ``require_expiry``) the expiry variable is
                missing or empty.
            """
            credentials = {}

            # Access key and secret key are always required.
            for var_name in ('access_key', 'secret_key'):
                value = environ.get(mapping[var_name], '')
                if not value:
                    raise PartialCredentialsError(
                        provider=method, cred_var=mapping[var_name]
                    )
                credentials[var_name] = value

            # First non-empty token variable wins.
            session_token = None
            for token_env_var in mapping['token']:
                candidate = environ.get(token_env_var, '')
                if candidate:
                    session_token = candidate
                    break
            credentials['token'] = session_token

            expiry_time = environ.get(mapping['expiry_time'], '')
            credentials['expiry_time'] = expiry_time or None
            if require_expiry and not expiry_time:
                raise PartialCredentialsError(
                    provider=method, cred_var=mapping['expiry_time']
                )

            credentials['account_id'] = (
                environ.get(mapping['account_id'], '') or None
            )

            return credentials

        return fetch_credentials
+
+
class OriginalEC2Provider(CredentialProvider):
    METHOD = 'ec2-credentials-file'
    CANONICAL_NAME = 'Ec2Config'

    CRED_FILE_ENV = 'AWS_CREDENTIAL_FILE'
    ACCESS_KEY = 'AWSAccessKeyId'
    SECRET_KEY = 'AWSSecretKey'

    def __init__(self, environ=None, parser=None):
        if environ is None:
            environ = os.environ
        if parser is None:
            parser = parse_key_val_file
        self._environ = environ
        self._parser = parser

    def load(self):
        """
        Search for a credential file used by original EC2 CLI tools.

        :returns: Credentials when the file pointed to by the
            AWS_CREDENTIAL_FILE environment variable contains an access
            key, otherwise None.
        """
        # Use the class-level constant instead of repeating the literal
        # env var name (previously hard-coded as 'AWS_CREDENTIAL_FILE').
        if self.CRED_FILE_ENV in self._environ:
            full_path = os.path.expanduser(self._environ[self.CRED_FILE_ENV])
            creds = self._parser(full_path)
            if self.ACCESS_KEY in creds:
                logger.info('Found credentials in AWS_CREDENTIAL_FILE.')
                access_key = creds[self.ACCESS_KEY]
                secret_key = creds[self.SECRET_KEY]
                # EC2 creds file doesn't support session tokens.
                return Credentials(access_key, secret_key, method=self.METHOD)
        else:
            return None
+
+
class SharedCredentialProvider(CredentialProvider):
    METHOD = 'shared-credentials-file'
    CANONICAL_NAME = 'SharedCredentials'

    ACCESS_KEY = 'aws_access_key_id'
    SECRET_KEY = 'aws_secret_access_key'
    # Same deal as the EnvProvider above. Botocore originally supported
    # aws_security_token, but the SDKs are standardizing on aws_session_token
    # so we support both.
    TOKENS = ['aws_security_token', 'aws_session_token']
    ACCOUNT_ID = 'aws_account_id'

    def __init__(self, creds_filename, profile_name=None, ini_parser=None):
        self._creds_filename = creds_filename
        self._profile_name = (
            'default' if profile_name is None else profile_name
        )
        if ini_parser is None:
            ini_parser = botocore.configloader.raw_config_parse
        self._ini_parser = ini_parser

    def load(self):
        """Load credentials for the configured profile from the shared file.

        :returns: Credentials when the profile exists and contains an
            access key, otherwise None.
        """
        try:
            available_creds = self._ini_parser(self._creds_filename)
        except ConfigNotFound:
            return None
        if self._profile_name not in available_creds:
            return None
        config = available_creds[self._profile_name]
        if self.ACCESS_KEY not in config:
            return None
        logger.info(
            "Found credentials in shared credentials file: %s",
            self._creds_filename,
        )
        access_key, secret_key = self._extract_creds_from_mapping(
            config, self.ACCESS_KEY, self.SECRET_KEY
        )
        token = self._get_session_token(config)
        account_id = self._get_account_id(config)
        register_feature_id('CREDENTIALS_PROFILE')
        return Credentials(
            access_key,
            secret_key,
            token,
            method=self.METHOD,
            account_id=account_id,
        )

    def _get_session_token(self, config):
        # Accept the legacy or the standardized token key, legacy first.
        for token_key in self.TOKENS:
            if token_key in config:
                return config[token_key]
        return None

    def _get_account_id(self, config):
        return config.get(self.ACCOUNT_ID)
+
+
class ConfigProvider(CredentialProvider):
    """INI based config provider with profile sections."""

    METHOD = 'config-file'
    CANONICAL_NAME = 'SharedConfig'

    ACCESS_KEY = 'aws_access_key_id'
    SECRET_KEY = 'aws_secret_access_key'
    # Same deal as the EnvProvider above. Botocore originally supported
    # aws_security_token, but the SDKs are standardizing on aws_session_token
    # so we support both.
    TOKENS = ['aws_security_token', 'aws_session_token']
    ACCOUNT_ID = 'aws_account_id'

    def __init__(self, config_filename, profile_name, config_parser=None):
        """
        :param config_filename: The session configuration scoped to the
            current profile. This is available via ``session.config``.
        :param profile_name: The name of the current profile.
        :param config_parser: A config parser callable.
        """
        self._config_filename = config_filename
        self._profile_name = profile_name
        if config_parser is None:
            config_parser = botocore.configloader.load_config
        self._config_parser = config_parser

    def load(self):
        """Use credentials from the session's config file, if present.

        :returns: Credentials when the current profile defines an access
            key, otherwise None.
        """
        try:
            full_config = self._config_parser(self._config_filename)
        except ConfigNotFound:
            return None
        profiles = full_config['profiles']
        if self._profile_name not in profiles:
            return None
        profile_config = profiles[self._profile_name]
        if self.ACCESS_KEY not in profile_config:
            return None
        logger.info(
            "Credentials found in config file: %s",
            self._config_filename,
        )
        access_key, secret_key = self._extract_creds_from_mapping(
            profile_config, self.ACCESS_KEY, self.SECRET_KEY
        )
        token = self._get_session_token(profile_config)
        account_id = self._get_account_id(profile_config)
        register_feature_id('CREDENTIALS_PROFILE')
        return Credentials(
            access_key,
            secret_key,
            token,
            method=self.METHOD,
            account_id=account_id,
        )

    def _get_session_token(self, profile_config):
        # Accept the legacy or the standardized token key, legacy first.
        for token_name in self.TOKENS:
            if token_name in profile_config:
                return profile_config[token_name]
        return None

    def _get_account_id(self, config):
        return config.get(self.ACCOUNT_ID)
+
+
class BotoProvider(CredentialProvider):
    METHOD = 'boto-config'
    CANONICAL_NAME = 'Boto2Config'

    BOTO_CONFIG_ENV = 'BOTO_CONFIG'
    DEFAULT_CONFIG_FILENAMES = ['/etc/boto.cfg', '~/.boto']
    ACCESS_KEY = 'aws_access_key_id'
    SECRET_KEY = 'aws_secret_access_key'

    def __init__(self, environ=None, ini_parser=None):
        self._environ = os.environ if environ is None else environ
        if ini_parser is None:
            ini_parser = botocore.configloader.raw_config_parse
        self._ini_parser = ini_parser

    def load(self):
        """Look for credentials in a legacy boto2 config file.

        :returns: Credentials from the first readable config file whose
            ``Credentials`` section has an access key, otherwise None.
        """
        # BOTO_CONFIG, when set, overrides the default search locations.
        if self.BOTO_CONFIG_ENV in self._environ:
            candidates = [self._environ[self.BOTO_CONFIG_ENV]]
        else:
            candidates = self.DEFAULT_CONFIG_FILENAMES
        for filename in candidates:
            try:
                config = self._ini_parser(filename)
            except ConfigNotFound:
                # Move on to the next potential config file name.
                continue
            if 'Credentials' not in config:
                continue
            credentials = config['Credentials']
            if self.ACCESS_KEY not in credentials:
                continue
            logger.info(
                "Found credentials in boto config file: %s", filename
            )
            access_key, secret_key = self._extract_creds_from_mapping(
                credentials, self.ACCESS_KEY, self.SECRET_KEY
            )
            register_feature_id('CREDENTIALS_BOTO2_CONFIG_FILE')
            return Credentials(
                access_key, secret_key, method=self.METHOD
            )
        return None
+
+
class AssumeRoleProvider(CredentialProvider):
    METHOD = 'assume-role'
    # The AssumeRole provider is logically part of the SharedConfig and
    # SharedCredentials providers. Since the purpose of the canonical name
    # is to provide cross-sdk compatibility, calling code will need to be
    # aware that either of those providers should be tied to the AssumeRole
    # provider as much as possible.
    CANONICAL_NAME = None
    # Profile key whose presence marks the profile as assume-role based.
    ROLE_CONFIG_VAR = 'role_arn'
    # NOTE: the attribute name carries a historical typo ("TOKE");
    # renaming it would break external code referencing this constant.
    WEB_IDENTITY_TOKE_FILE_VAR = 'web_identity_token_file'
    # Credentials are considered expired (and will be refreshed) once the total
    # remaining time left until the credentials expires is less than the
    # EXPIRY_WINDOW.
    EXPIRY_WINDOW_SECONDS = 60 * 15
    # Maps canonical credential_source names to feature ids; presumably
    # recorded when that named source supplies the source credentials —
    # the consuming code is not visible here, confirm against it.
    NAMED_PROVIDER_FEATURE_MAP = {
        'Ec2InstanceMetadata': 'CREDENTIALS_IMDS',
        'Environment': 'CREDENTIALS_ENV_VARS',
        'EcsContainer': 'CREDENTIALS_HTTP',
    }
+
+ def __init__(
+ self,
+ load_config,
+ client_creator,
+ cache,
+ profile_name,
+ prompter=getpass.getpass,
+ credential_sourcer=None,
+ profile_provider_builder=None,
+ ):
+ """
+ :type load_config: callable
+ :param load_config: A function that accepts no arguments, and
+ when called, will return the full configuration dictionary
+ for the session (``session.full_config``).
+
+ :type client_creator: callable
+ :param client_creator: A factory function that will create
+ a client when called. Has the same interface as
+ ``botocore.session.Session.create_client``.
+
+ :type cache: dict
+ :param cache: An object that supports ``__getitem__``,
+ ``__setitem__``, and ``__contains__``. An example
+ of this is the ``JSONFileCache`` class in the CLI.
+
+ :type profile_name: str
+ :param profile_name: The name of the profile.
+
+ :type prompter: callable
+ :param prompter: A callable that returns input provided
+ by the user (i.e raw_input, getpass.getpass, etc.).
+
+ :type credential_sourcer: CanonicalNameCredentialSourcer
+ :param credential_sourcer: A credential provider that takes a
+ configuration, which is used to provide the source credentials
+ for the STS call.
+ """
+ #: The cache used to first check for assumed credentials.
+ #: This is checked before making the AssumeRole API
+ #: calls and can be useful if you have short lived
+ #: scripts and you'd like to avoid calling AssumeRole
+ #: until the credentials are expired.
+ self.cache = cache
+ self._load_config = load_config
+ # client_creator is a callable that creates function.
+ # It's basically session.create_client
+ self._client_creator = client_creator
+ self._profile_name = profile_name
+ self._prompter = prompter
+ # The _loaded_config attribute will be populated from the
+ # load_config() function once the configuration is actually
+ # loaded. The reason we go through all this instead of just
+ # requiring that the loaded_config be passed to us is to that
+ # we can defer configuration loaded until we actually try
+ # to load credentials (as opposed to when the object is
+ # instantiated).
+ self._loaded_config = {}
+ self._credential_sourcer = credential_sourcer
+ self._profile_provider_builder = profile_provider_builder
+ self._visited_profiles = [self._profile_name]
+ self._feature_ids = set()
+
+ def load(self):
+ self._loaded_config = self._load_config()
+ profiles = self._loaded_config.get('profiles', {})
+ profile = profiles.get(self._profile_name, {})
+ if self._has_assume_role_config_vars(profile):
+ return self._load_creds_via_assume_role(self._profile_name)
+
+ def _has_assume_role_config_vars(self, profile):
+ return (
+ self.ROLE_CONFIG_VAR in profile
+ and
+ # We need to ensure this provider doesn't look at a profile when
+ # the profile has configuration for web identity. Simply relying on
+ # the order in the credential chain is insufficient as it doesn't
+ # prevent the case when we're doing an assume role chain.
+ self.WEB_IDENTITY_TOKE_FILE_VAR not in profile
+ )
+
+ def _load_creds_via_assume_role(self, profile_name):
+ role_config = self._get_role_config(profile_name)
+ source_credentials = self._resolve_source_credentials(
+ role_config, profile_name
+ )
+
+ extra_args = {}
+ role_session_name = role_config.get('role_session_name')
+ if role_session_name is not None:
+ extra_args['RoleSessionName'] = role_session_name
+
+ external_id = role_config.get('external_id')
+ if external_id is not None:
+ extra_args['ExternalId'] = external_id
+
+ mfa_serial = role_config.get('mfa_serial')
+ if mfa_serial is not None:
+ extra_args['SerialNumber'] = mfa_serial
+
+ duration_seconds = role_config.get('duration_seconds')
+ if duration_seconds is not None:
+ extra_args['DurationSeconds'] = duration_seconds
+
+ fetcher = AssumeRoleCredentialFetcher(
+ client_creator=self._client_creator,
+ source_credentials=source_credentials,
+ role_arn=role_config['role_arn'],
+ extra_args=extra_args,
+ mfa_prompter=self._prompter,
+ cache=self.cache,
+ )
+ fetcher.feature_ids = self._feature_ids.copy()
+ refresher = fetcher.fetch_credentials
+ if mfa_serial is not None:
+ refresher = create_mfa_serial_refresher(refresher)
+
+ self._feature_ids.add('CREDENTIALS_STS_ASSUME_ROLE')
+ register_feature_ids(self._feature_ids)
+ # The initial credentials are empty and the expiration time is set
+ # to now so that we can delay the call to assume role until it is
+ # strictly needed.
+ return DeferredRefreshableCredentials(
+ method=self.METHOD,
+ refresh_using=refresher,
+ time_fetcher=_local_now,
+ )
+
+ def _get_role_config(self, profile_name):
+ """Retrieves and validates the role configuration for the profile."""
+ profiles = self._loaded_config.get('profiles', {})
+
+ profile = profiles[profile_name]
+ source_profile = profile.get('source_profile')
+ role_arn = profile['role_arn']
+ credential_source = profile.get('credential_source')
+ mfa_serial = profile.get('mfa_serial')
+ external_id = profile.get('external_id')
+ role_session_name = profile.get('role_session_name')
+ duration_seconds = profile.get('duration_seconds')
+
+ role_config = {
+ 'role_arn': role_arn,
+ 'external_id': external_id,
+ 'mfa_serial': mfa_serial,
+ 'role_session_name': role_session_name,
+ 'source_profile': source_profile,
+ 'credential_source': credential_source,
+ }
+
+ if duration_seconds is not None:
+ try:
+ role_config['duration_seconds'] = int(duration_seconds)
+ except ValueError:
+ pass
+
+ # Either the credential source or the source profile must be
+ # specified, but not both.
+ if credential_source is not None and source_profile is not None:
+ raise InvalidConfigError(
+ error_msg=(
+ f'The profile "{profile_name}" contains both '
+ 'source_profile and credential_source.'
+ )
+ )
+ elif credential_source is None and source_profile is None:
+ raise PartialCredentialsError(
+ provider=self.METHOD,
+ cred_var='source_profile or credential_source',
+ )
+ elif credential_source is not None:
+ self._validate_credential_source(profile_name, credential_source)
+ else:
+ self._validate_source_profile(profile_name, source_profile)
+
+ return role_config
+
+ def _validate_credential_source(self, parent_profile, credential_source):
+ if self._credential_sourcer is None:
+ raise InvalidConfigError(
+ error_msg=(
+ f"The credential_source \"{credential_source}\" is specified "
+ f"in profile \"{parent_profile}\", "
+ f"but no source provider was configured."
+ )
+ )
+ if not self._credential_sourcer.is_supported(credential_source):
+ raise InvalidConfigError(
+ error_msg=(
+ f"The credential source \"{credential_source}\" referenced "
+ f"in profile \"{parent_profile}\" is not valid."
+ )
+ )
+
+ def _source_profile_has_credentials(self, profile):
+ return any(
+ [
+ self._has_static_credentials(profile),
+ self._has_assume_role_config_vars(profile),
+ ]
+ )
+
+ def _validate_source_profile(
+ self, parent_profile_name, source_profile_name
+ ):
+ profiles = self._loaded_config.get('profiles', {})
+ if source_profile_name not in profiles:
+ raise InvalidConfigError(
+ error_msg=(
+ f"The source_profile \"{source_profile_name}\" referenced in "
+ f"the profile \"{parent_profile_name}\" does not exist."
+ )
+ )
+
+ source_profile = profiles[source_profile_name]
+
+ # Make sure we aren't going into an infinite loop. If we haven't
+ # visited the profile yet, we're good.
+ if source_profile_name not in self._visited_profiles:
+ return
+
+ # If we have visited the profile and the profile isn't simply
+ # referencing itself, that's an infinite loop.
+ if source_profile_name != parent_profile_name:
+ raise InfiniteLoopConfigError(
+ source_profile=source_profile_name,
+ visited_profiles=self._visited_profiles,
+ )
+
+ # A profile is allowed to reference itself so that it can source
+ # static credentials and have configuration all in the same
+ # profile. This will only ever work for the top level assume
+ # role because the static credentials will otherwise take
+ # precedence.
+ if not self._has_static_credentials(source_profile):
+ raise InfiniteLoopConfigError(
+ source_profile=source_profile_name,
+ visited_profiles=self._visited_profiles,
+ )
+
+ def _has_static_credentials(self, profile):
+ static_keys = ['aws_secret_access_key', 'aws_access_key_id']
+ return any(static_key in profile for static_key in static_keys)
+
+ def _resolve_source_credentials(self, role_config, profile_name):
+ credential_source = role_config.get('credential_source')
+ if credential_source is not None:
+ self._feature_ids.add('CREDENTIALS_PROFILE_NAMED_PROVIDER')
+ return self._resolve_credentials_from_source(
+ credential_source, profile_name
+ )
+
+ source_profile = role_config['source_profile']
+ self._visited_profiles.append(source_profile)
+ self._feature_ids.add('CREDENTIALS_PROFILE_SOURCE_PROFILE')
+ return self._resolve_credentials_from_profile(source_profile)
+
+ def _resolve_credentials_from_profile(self, profile_name):
+ profiles = self._loaded_config.get('profiles', {})
+ profile = profiles[profile_name]
+ self._feature_ids.add('CREDENTIALS_PROFILE')
+ if (
+ self._has_static_credentials(profile)
+ and not self._profile_provider_builder
+ ):
+ # This is only here for backwards compatibility. If this provider
+ # isn't given a profile provider builder we still want to be able
+ # to handle the basic static credential case as we would before the
+ # profile provider builder parameter was added.
+ return self._resolve_static_credentials_from_profile(profile)
+ elif self._has_static_credentials(
+ profile
+ ) or not self._has_assume_role_config_vars(profile):
+ profile_providers = self._profile_provider_builder.providers(
+ profile_name=profile_name,
+ disable_env_vars=True,
+ )
+ profile_chain = CredentialResolver(profile_providers)
+ credentials = profile_chain.load_credentials()
+ if credentials is None:
+ error_message = (
+ 'The source profile "%s" must have credentials.'
+ )
+ raise InvalidConfigError(
+ error_msg=error_message % profile_name,
+ )
+ return credentials
+
+ return self._load_creds_via_assume_role(profile_name)
+
+ def _resolve_static_credentials_from_profile(self, profile):
+ try:
+ return Credentials(
+ access_key=profile['aws_access_key_id'],
+ secret_key=profile['aws_secret_access_key'],
+ token=profile.get('aws_session_token'),
+ )
+ except KeyError as e:
+ raise PartialCredentialsError(
+ provider=self.METHOD, cred_var=str(e)
+ )
+
+ def _resolve_credentials_from_source(
+ self, credential_source, profile_name
+ ):
+ credentials = self._credential_sourcer.source_credentials(
+ credential_source
+ )
+ if credentials is None:
+ raise CredentialRetrievalError(
+ provider=credential_source,
+ error_msg=(
+ 'No credentials found in credential_source referenced '
+ f'in profile {profile_name}'
+ ),
+ )
+ named_provider_feature_id = self.NAMED_PROVIDER_FEATURE_MAP.get(
+ credential_source
+ )
+ if named_provider_feature_id:
+ self._feature_ids.add(named_provider_feature_id)
+ return credentials
+
+
class AssumeRoleWithWebIdentityProvider(CredentialProvider):
    """Provides credentials via sts:AssumeRoleWithWebIdentity.

    Configuration comes from the profile (``web_identity_token_file``,
    ``role_arn``, ``role_session_name``) with the corresponding
    environment variables taking precedence unless ``disable_env_vars``
    is set.
    """

    METHOD = 'assume-role-with-web-identity'
    # Not resolvable through credential_source, so no canonical name.
    CANONICAL_NAME = None
    _CONFIG_TO_ENV_VAR = {
        'web_identity_token_file': 'AWS_WEB_IDENTITY_TOKEN_FILE',
        'role_session_name': 'AWS_ROLE_SESSION_NAME',
        'role_arn': 'AWS_ROLE_ARN',
    }

    def __init__(
        self,
        load_config,
        client_creator,
        profile_name,
        cache=None,
        disable_env_vars=False,
        token_loader_cls=None,
    ):
        self.cache = cache
        self._load_config = load_config
        self._client_creator = client_creator
        self._profile_name = profile_name
        # Lazily populated on first _get_profile_config() call.
        self._profile_config = None
        self._disable_env_vars = disable_env_vars
        if token_loader_cls is None:
            token_loader_cls = FileWebIdentityTokenLoader
        self._token_loader_cls = token_loader_cls
        self._feature_ids = set()

    def load(self):
        return self._assume_role_with_web_identity()

    def _get_profile_config(self, key):
        # Load and cache the profile section on first access.
        if self._profile_config is None:
            loaded_config = self._load_config()
            profiles = loaded_config.get('profiles', {})
            self._profile_config = profiles.get(self._profile_name, {})
        return self._profile_config.get(key)

    def _get_env_config(self, key):
        if self._disable_env_vars:
            return None
        env_key = self._CONFIG_TO_ENV_VAR.get(key)
        if env_key and env_key in os.environ:
            return os.environ[env_key]
        return None

    def _get_config(self, key):
        """Return *key* from the environment, then the profile, else None."""
        env_value = self._get_env_config(key)
        if env_value is not None:
            self._feature_ids.add('CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN')
            return env_value

        config_value = self._get_profile_config(key)
        if config_value is not None:
            self._feature_ids.add('CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN')
            return config_value

        return None

    def _assume_role_with_web_identity(self):
        token_path = self._get_config('web_identity_token_file')
        if not token_path:
            # No token file configured; this provider does not apply.
            return None
        token_loader = self._token_loader_cls(token_path)

        role_arn = self._get_config('role_arn')
        if not role_arn:
            # BUGFIX: the original string concatenation was missing a
            # space, producing "has the role_arnconfiguration set".
            error_msg = (
                'The provided profile or the current environment is '
                'configured to assume role with web identity but has no '
                'role ARN configured. Ensure that the profile has the '
                'role_arn configuration set or the AWS_ROLE_ARN env var '
                'is set.'
            )
            raise InvalidConfigError(error_msg=error_msg)

        extra_args = {}
        role_session_name = self._get_config('role_session_name')
        if role_session_name is not None:
            extra_args['RoleSessionName'] = role_session_name

        fetcher = AssumeRoleWithWebIdentityCredentialFetcher(
            client_creator=self._client_creator,
            web_identity_token_loader=token_loader,
            role_arn=role_arn,
            extra_args=extra_args,
            cache=self.cache,
        )
        # Snapshot feature ids before recording the provider-level one.
        fetcher.feature_ids = self._feature_ids.copy()

        self._feature_ids.add('CREDENTIALS_STS_ASSUME_ROLE_WEB_ID')
        register_feature_ids(self._feature_ids)
        # The initial credentials are empty and the expiration time is set
        # to now so that we can delay the call to assume role until it is
        # strictly needed.
        return DeferredRefreshableCredentials(
            method=self.METHOD,
            refresh_using=fetcher.fetch_credentials,
        )
+
+
class CanonicalNameCredentialSourcer:
    """Resolves source credentials by a provider's canonical name."""

    def __init__(self, providers):
        self._providers = providers

    def is_supported(self, source_name):
        """Validates a given source name.

        :type source_name: str
        :param source_name: The value of credential_source in the config
            file. This is the canonical name of the credential provider.

        :rtype: bool
        :returns: True if the credential provider is supported,
            False otherwise.
        """
        return any(
            provider.CANONICAL_NAME == source_name
            for provider in self._providers
        )

    def source_credentials(self, source_name):
        """Loads source credentials based on the provided configuration.

        :type source_name: str
        :param source_name: The value of credential_source in the config
            file. This is the canonical name of the credential provider.

        :rtype: Credentials
        """
        source = self._get_provider(source_name)
        if isinstance(source, CredentialResolver):
            return source.load_credentials()
        return source.load()

    def _get_provider(self, canonical_name):
        """Return a credential provider by its canonical name.

        :type canonical_name: str
        :param canonical_name: The canonical name of the provider.

        :raises UnknownCredentialError: Raised if no credential provider
            by the provided name is found.
        """
        provider = self._get_provider_by_canonical_name(canonical_name)

        # The assume-role provider effectively belongs to both the
        # SharedConfig and SharedCredentials providers (it is not its
        # own canonical source), so requests for either of those must
        # include it as well.
        if canonical_name.lower() in ('sharedconfig', 'sharedcredentials'):
            assume_role_provider = self._get_provider_by_method('assume-role')
            if assume_role_provider is not None:
                if provider is None:
                    # The shared provider may have been removed while
                    # the assume-role provider is still registered;
                    # return the latter on its own.
                    return assume_role_provider
                # Bundle both so callers can treat them as one entity.
                return CredentialResolver([assume_role_provider, provider])

        if provider is None:
            raise UnknownCredentialError(name=canonical_name)
        return provider

    def _get_provider_by_canonical_name(self, canonical_name):
        """Strict, case-insensitive canonical-name lookup; None if absent."""
        wanted = canonical_name.lower()
        return next(
            (
                p
                for p in self._providers
                if p.CANONICAL_NAME and p.CANONICAL_NAME.lower() == wanted
            ),
            None,
        )

    def _get_provider_by_method(self, method):
        """Return a credential provider by its METHOD name."""
        return next(
            (p for p in self._providers if p.METHOD == method),
            None,
        )
+
+
class ContainerProvider(CredentialProvider):
    """Fetches credentials from the container credential endpoint.

    Opt-in only: triggers when one of the relative/full URI environment
    variables is present.
    """

    METHOD = 'container-role'
    CANONICAL_NAME = 'EcsContainer'
    # Relative URI, resolved against the well-known metadata host.
    ENV_VAR = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI'
    # Full URI alternative.
    ENV_VAR_FULL = 'AWS_CONTAINER_CREDENTIALS_FULL_URI'
    ENV_VAR_AUTH_TOKEN = 'AWS_CONTAINER_AUTHORIZATION_TOKEN'
    ENV_VAR_AUTH_TOKEN_FILE = 'AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE'

    def __init__(self, environ=None, fetcher=None):
        self._environ = os.environ if environ is None else environ
        self._fetcher = (
            ContainerMetadataFetcher() if fetcher is None else fetcher
        )

    def load(self):
        # This cred provider is only triggered if one of the opt-in
        # environment variables is set.
        if self.ENV_VAR in self._environ or self.ENV_VAR_FULL in self._environ:
            return self._retrieve_or_fail()

    def _retrieve_or_fail(self):
        if self._provided_relative_uri():
            full_uri = self._fetcher.full_url(self._environ[self.ENV_VAR])
        else:
            full_uri = self._environ[self.ENV_VAR_FULL]
        fetcher = self._create_fetcher(full_uri)
        creds = fetcher()
        return RefreshableCredentials(
            access_key=creds['access_key'],
            secret_key=creds['secret_key'],
            token=creds['token'],
            method=self.METHOD,
            expiry_time=_parse_if_needed(creds['expiry_time']),
            refresh_using=fetcher,
            account_id=creds.get('account_id'),
        )

    def _build_headers(self):
        """Return the Authorization header dict, or None when unset.

        The token file, when configured, is re-read on every call so
        rotated tokens are picked up on refresh.
        """
        token_file_path = self._environ.get(self.ENV_VAR_AUTH_TOKEN_FILE)
        if token_file_path is not None:
            with open(token_file_path) as token_file:
                auth_token = token_file.read()
        else:
            auth_token = self._environ.get(self.ENV_VAR_AUTH_TOKEN)
        if auth_token is None:
            return None
        self._validate_auth_token(auth_token)
        return {'Authorization': auth_token}

    def _validate_auth_token(self, auth_token):
        # Reject CR/LF to prevent HTTP header injection.
        if "\r" in auth_token or "\n" in auth_token:
            raise ValueError("Auth token value is not a legal header value")

    def _create_fetcher(self, full_uri, *args, **kwargs):
        def fetch_creds():
            try:
                response = self._fetcher.retrieve_full_uri(
                    full_uri, headers=self._build_headers()
                )
                register_feature_id('CREDENTIALS_HTTP')
            except MetadataRetrievalError as e:
                logger.debug(
                    "Error retrieving container metadata: %s", e, exc_info=True
                )
                raise CredentialRetrievalError(
                    provider=self.METHOD, error_msg=str(e)
                )
            return {
                'access_key': response['AccessKeyId'],
                'secret_key': response['SecretAccessKey'],
                'token': response['Token'],
                'expiry_time': response['Expiration'],
                'account_id': response.get('AccountId'),
            }

        return fetch_creds

    def _provided_relative_uri(self):
        return self.ENV_VAR in self._environ
+
+
class CredentialResolver:
    """Tries an ordered chain of credential providers until one succeeds."""

    def __init__(self, providers):
        """
        :param providers: A list of ``CredentialProvider`` instances.
        """
        self.providers = providers

    def insert_before(self, name, credential_provider):
        """
        Inserts a new instance of ``CredentialProvider`` into the chain that
        will be tried before an existing one.

        :param name: The short name of the credentials you'd like to insert
            the new credentials before. (ex. ``env`` or ``config``).
            Existing names & ordering can be discovered via
            ``self.available_methods``.
        :type name: string

        :param credential_provider: An instance of the new ``Credentials``
            object you'd like to add to the chain.
        :type credential_provider: A subclass of ``Credentials``

        :raises UnknownCredentialError: If ``name`` is not in the chain.
        """
        # Consistency fix: reuse the shared offset lookup instead of
        # duplicating its try/except inline, so unknown names fail the
        # same way as in insert_after/get_provider.
        offset = self._get_provider_offset(name)
        self.providers.insert(offset, credential_provider)

    def insert_after(self, name, credential_provider):
        """
        Inserts a new type of ``Credentials`` instance into the chain that
        will be tried after an existing one.

        :param name: The short name of the credentials you'd like to insert
            the new credentials after. (ex. ``env`` or ``config``).
            Existing names & ordering can be discovered via
            ``self.available_methods``.
        :type name: string

        :param credential_provider: An instance of the new ``Credentials``
            object you'd like to add to the chain.
        :type credential_provider: A subclass of ``Credentials``

        :raises UnknownCredentialError: If ``name`` is not in the chain.
        """
        offset = self._get_provider_offset(name)
        self.providers.insert(offset + 1, credential_provider)

    def remove(self, name):
        """
        Removes a given ``Credentials`` instance from the chain.

        :param name: The short name of the credentials instance to remove.
        :type name: string
        """
        available_methods = [p.METHOD for p in self.providers]
        if name not in available_methods:
            # Not present; fail silently so callers can remove providers
            # that may never have been registered.
            return

        offset = available_methods.index(name)
        self.providers.pop(offset)

    def get_provider(self, name):
        """Return a credential provider by name.

        :type name: str
        :param name: The name of the provider.

        :raises UnknownCredentialError: Raised if no
            credential provider by the provided name
            is found.
        """
        return self.providers[self._get_provider_offset(name)]

    def _get_provider_offset(self, name):
        # Single place that maps a METHOD name to its chain position.
        try:
            return [p.METHOD for p in self.providers].index(name)
        except ValueError:
            raise UnknownCredentialError(name=name)

    def load_credentials(self):
        """
        Goes through the credentials chain, returning the first
        ``Credentials`` that could be loaded.
        """
        # First provider to return a non-None response wins.
        for provider in self.providers:
            logger.debug("Looking for credentials via: %s", provider.METHOD)
            creds = provider.load()
            if creds is not None:
                return creds

        # If we got here, no credentials could be found. Historically
        # ``None`` is returned rather than raising.
        return None
+
+
class SSOCredentialFetcher(CachedCredentialFetcher):
    """Fetches and caches role credentials via the SSO GetRoleCredentials API.

    Two token flows are supported: ``token_provider`` (sso-session based
    configuration) or the legacy ``token_loader`` keyed by start URL.
    """

    # Timestamp format used for the cached Expiration value.
    _UTC_DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'

    def __init__(
        self,
        start_url,
        sso_region,
        role_name,
        account_id,
        client_creator,
        token_loader=None,
        cache=None,
        expiry_window_seconds=None,
        token_provider=None,
        sso_session_name=None,
        time_fetcher=_local_now,
    ):
        self._client_creator = client_creator
        self._sso_region = sso_region
        self._role_name = role_name
        self._account_id = account_id
        self._start_url = start_url
        self._token_loader = token_loader
        self._token_provider = token_provider
        self._sso_session_name = sso_session_name
        # Injectable clock; used to pre-check legacy token expiry.
        self._time_fetcher = time_fetcher
        super().__init__(cache, expiry_window_seconds)

    def _create_cache_key(self):
        """Create a predictable cache key for the current configuration.

        The cache key is intended to be compatible with file names.
        """
        args = {
            'roleName': self._role_name,
            'accountId': self._account_id,
        }
        # Session-based and legacy configurations must not collide:
        # key on the session name when set, the start URL otherwise.
        if self._sso_session_name:
            args['sessionName'] = self._sso_session_name
        else:
            args['startUrl'] = self._start_url
        # NOTE: It would be good to hoist this cache key construction logic
        # into the CachedCredentialFetcher class as we should be consistent.
        # Unfortunately, the current assume role fetchers that sub class don't
        # pass separators resulting in non-minified JSON. In the long term,
        # all fetchers should use the below caching scheme.
        args = json.dumps(args, sort_keys=True, separators=(',', ':'))
        argument_hash = sha1(args.encode('utf-8')).hexdigest()
        return self._make_file_safe(argument_hash)

    def _parse_timestamp(self, timestamp_ms):
        # fromtimestamp expects seconds so: milliseconds / 1000 = seconds
        timestamp_seconds = timestamp_ms / 1000.0
        timestamp = datetime.datetime.fromtimestamp(timestamp_seconds, tzutc())
        return timestamp.strftime(self._UTC_DATE_FORMAT)

    def _get_credentials(self):
        """Get credentials by calling SSO get role credentials."""
        # GetRoleCredentials authenticates via the bearer token, so the
        # request is deliberately unsigned.
        config = Config(
            signature_version=UNSIGNED,
            region_name=self._sso_region,
        )
        client = self._client_creator('sso', config=config)
        if self._token_provider:
            initial_token_data = self._token_provider.load_token()
            token = initial_token_data.get_frozen_token().token
        else:
            token_dict = self._token_loader(self._start_url)
            token = token_dict['accessToken']

            # raise an UnauthorizedSSOTokenError if the loaded legacy token
            # is expired to save a call to GetRoleCredentials with an
            # expired token.
            expiration = dateutil.parser.parse(token_dict['expiresAt'])
            remaining = total_seconds(expiration - self._time_fetcher())
            if remaining <= 0:
                raise UnauthorizedSSOTokenError()

        kwargs = {
            'roleName': self._role_name,
            'accountId': self._account_id,
            'accessToken': token,
        }
        try:
            # feature_ids is assigned by the calling provider before use.
            register_feature_ids(self.feature_ids)
            response = client.get_role_credentials(**kwargs)
        except client.exceptions.UnauthorizedException:
            raise UnauthorizedSSOTokenError()
        credentials = response['roleCredentials']

        # Reshape the API response into the cached credential format.
        credentials = {
            'ProviderType': 'sso',
            'Credentials': {
                'AccessKeyId': credentials['accessKeyId'],
                'SecretAccessKey': credentials['secretAccessKey'],
                'SessionToken': credentials['sessionToken'],
                'Expiration': self._parse_timestamp(credentials['expiration']),
                'AccountId': self._account_id,
            },
        }
        return credentials
+
+
class SSOProvider(CredentialProvider):
    """Resolves credentials for profiles configured for AWS SSO."""

    METHOD = 'sso'

    _SSO_TOKEN_CACHE_DIR = os.path.expanduser(
        os.path.join('~', '.aws', 'sso', 'cache')
    )
    # Vars that must live on the profile itself.
    _PROFILE_REQUIRED_CONFIG_VARS = (
        'sso_role_name',
        'sso_account_id',
    )
    # Vars that may come from the profile or an sso-session section.
    _SSO_REQUIRED_CONFIG_VARS = (
        'sso_start_url',
        'sso_region',
    )
    _ALL_REQUIRED_CONFIG_VARS = (
        _PROFILE_REQUIRED_CONFIG_VARS + _SSO_REQUIRED_CONFIG_VARS
    )

    def __init__(
        self,
        load_config,
        client_creator,
        profile_name,
        cache=None,
        token_cache=None,
        token_provider=None,
    ):
        self._token_cache = (
            JSONFileCache(self._SSO_TOKEN_CACHE_DIR)
            if token_cache is None
            else token_cache
        )
        self._token_provider = token_provider
        self.cache = {} if cache is None else cache
        self._load_config = load_config
        self._client_creator = client_creator
        self._profile_name = profile_name
        self._feature_ids = set()

    def _load_sso_config(self):
        """Return the validated SSO config for the profile, or None."""
        loaded_config = self._load_config()
        profiles = loaded_config.get('profiles', {})
        profile_name = self._profile_name
        profile_config = profiles.get(self._profile_name, {})
        sso_sessions = loaded_config.get('sso_sessions', {})

        # Only engage this provider when the profile names an SSO role
        # or account; otherwise defer to the rest of the chain.
        if not any(
            c in profile_config for c in self._PROFILE_REQUIRED_CONFIG_VARS
        ):
            return None

        resolved_config, extra_reqs = self._resolve_sso_session_reference(
            profile_config, sso_sessions
        )

        config = {}
        missing_config_vars = []
        for config_var in self._ALL_REQUIRED_CONFIG_VARS + extra_reqs:
            try:
                config[config_var] = resolved_config[config_var]
            except KeyError:
                missing_config_vars.append(config_var)

        if missing_config_vars:
            missing = ', '.join(missing_config_vars)
            raise InvalidConfigError(
                error_msg=(
                    f'The profile "{profile_name}" is configured to use SSO '
                    f'but is missing required configuration: {missing}'
                )
            )
        return config

    def _resolve_sso_session_reference(self, profile_config, sso_sessions):
        """Merge a referenced sso-session section into the profile config.

        Returns the merged config plus any extra required keys; a profile
        without an sso_session reference is passed through unchanged
        (legacy flow).
        """
        sso_session_name = profile_config.get('sso_session')
        if sso_session_name is None:
            # No reference to resolve, proceed with legacy flow
            return profile_config, ()

        if sso_session_name not in sso_sessions:
            raise InvalidConfigError(
                error_msg=f'The specified sso-session does not exist: "{sso_session_name}"'
            )

        config = profile_config.copy()
        for config_var, val in sso_sessions[sso_session_name].items():
            # A key set in both the profile and sso-session must agree.
            if config.get(config_var, val) != val:
                raise InvalidConfigError(
                    error_msg=(
                        f"The value for {config_var} is inconsistent between "
                        f"profile ({config[config_var]}) and sso-session ({val})."
                    )
                )
            config[config_var] = val
        return config, ('sso_session',)

    def load(self):
        sso_config = self._load_sso_config()
        if not sso_config:
            return None

        fetcher_kwargs = {
            'start_url': sso_config['sso_start_url'],
            'sso_region': sso_config['sso_region'],
            'role_name': sso_config['sso_role_name'],
            'account_id': sso_config['sso_account_id'],
            'client_creator': self._client_creator,
            'token_loader': SSOTokenLoader(cache=self._token_cache),
            'cache': self.cache,
        }
        uses_sso_session = 'sso_session' in sso_config
        if uses_sso_session:
            fetcher_kwargs['sso_session_name'] = sso_config['sso_session']
            fetcher_kwargs['token_provider'] = self._token_provider
            self._feature_ids.add('CREDENTIALS_PROFILE_SSO')
        else:
            self._feature_ids.add('CREDENTIALS_PROFILE_SSO_LEGACY')

        sso_fetcher = SSOCredentialFetcher(**fetcher_kwargs)
        # The fetcher snapshots feature ids before the provider-level
        # SSO feature id is recorded below.
        sso_fetcher.feature_ids = self._feature_ids.copy()

        self._feature_ids.add(
            'CREDENTIALS_SSO' if uses_sso_session else 'CREDENTIALS_SSO_LEGACY'
        )
        register_feature_ids(self._feature_ids)
        return DeferredRefreshableCredentials(
            method=self.METHOD,
            refresh_using=sso_fetcher.fetch_credentials,
        )
+
+
+def _base64_url_encode_no_padding(data):
+ return base64.urlsafe_b64encode(data).rstrip(b'=').decode('ascii')
+
+
def _build_dpop_header(private_key, uri, uid=None, ts=None):
    """Build a DPoP proof JWT for a POST request to *uri*.

    The JWT is signed with ES256 using *private_key*; ``uid`` and ``ts``
    override the ``jti``/``iat`` claims when supplied.
    """
    if EC is None:
        # The elliptic-curve primitives come from the optional CRT dep.
        raise MissingDependencyException(
            msg=(
                "This operation requires an additional dependency. You"
                " will need to pip install \"botocore[crt]\" before proceeding."
            )
        )

    def encode_json(obj):
        return _base64_url_encode_no_padding(
            json.dumps(obj, separators=(',', ':')).encode()
        )

    x, y = private_key.get_public_coords()
    header_b64 = encode_json(
        {
            "typ": "dpop+jwt",
            "alg": "ES256",
            "jwk": {
                "kty": "EC",
                "x": _base64_url_encode_no_padding(x),
                "y": _base64_url_encode_no_padding(y),
                "crv": "P-256",
            },
        }
    )
    payload_b64 = encode_json(
        {
            "htm": "POST",
            "htu": uri,
            "iat": ts or int(time.time()),
            "jti": uid or str(uuid.uuid4()),
        }
    )

    signing_input = f"{header_b64}.{payload_b64}".encode()
    der_signature = private_key.sign(sha256(signing_input).digest())
    # JOSE signatures are the raw fixed-width (r, s) pair, not DER.
    raw_signature = EC.decode_der_signature_to_padded_pair(
        der_signature, pad_to=32
    )
    signature_b64 = _base64_url_encode_no_padding(raw_signature)

    return f"{header_b64}.{payload_b64}.{signature_b64}"
+
+
+def _build_add_dpop_header_handler(private_key):
+ """Builds a before-call handler for calculating and setting the DPoP header"""
+
+ def _add_dpop_header_handler(**kwargs):
+ kwargs['params']['headers']['DPoP'] = _build_dpop_header(
+ private_key, kwargs['params']['url']
+ )
+
+ return _add_dpop_header_handler
+
+
class LoginCredentialFetcher:
    """
    Converts login access tokens from the cached token to
    credentials, and supports refreshing them.
    """

    # Refresh only when the cached token has fewer seconds of validity
    # remaining than this threshold.
    _REFRESH_THRESHOLD = 5 * 60
    # Fields the refresh flow requires from a cached token.
    _REQUIRED_TOKEN_FIELDS = (
        'accessToken',
        'refreshToken',
        'dpopKey',
        'clientId',
    )

    def __init__(
        self,
        session_name,
        token_loader,
        client_creator,
        time_fetcher=_local_now,
        feature_ids=None,
    ):
        self._session_name = session_name
        self._token_loader = token_loader
        self._client_creator = client_creator
        # Injectable clock, used for expiry checks and timestamps.
        self._time_fetcher = time_fetcher
        if feature_ids is None:
            feature_ids = set()
        self.feature_ids = feature_ids

    def load_cached_credentials(self):
        """Loads cached credentials without checking their expiry."""
        token = self._token_loader.load_token(self._session_name)

        if token is None:
            raise LoginTokenLoadError(
                error_msg='Unable to load a existing login session for session '
                f'{self._session_name}. Please reauthenticate with '
                "'aws login'.",
            )

        # Fail fast with a clear message if the cached token is missing
        # fields that refresh_credentials will need later.
        missing_fields = [
            key for key in self._REQUIRED_TOKEN_FIELDS if key not in token
        ]
        if missing_fields:
            raise LoginTokenLoadError(
                error_msg=f'Failed to load access token from token cache, missing required fields: {", ".join(missing_fields)}.'
            )

        return self._token_to_credentials(token)

    def refresh_credentials(self):
        """Refreshes login credentials, including saving them to the cache."""
        if self.feature_ids:
            register_feature_ids(self.feature_ids)
        # Reload the token from disk, we need the refresh info
        token = self._token_loader.load_token(self._session_name)
        private_key = self._load_private_key(token)

        # Check if token has already been refreshed and is still valid
        if (
            token
            and 'accessToken' in token
            and 'expiresAt' in token['accessToken']
        ):
            expiry_time = _parse_if_needed(token['accessToken']['expiresAt'])
            remaining_time = total_seconds(expiry_time - self._time_fetcher())
            if remaining_time > self._REFRESH_THRESHOLD:
                return self._token_to_credentials(token)

        # The refresh call is authorized via the DPoP proof and refresh
        # token rather than SigV4, so the client is left unsigned.
        config = botocore.config.Config(
            signature_version=botocore.UNSIGNED,
        )
        client = self._client_creator(
            'signin',
            config=config,
        )

        client.meta.events.register(
            'before-call.signin.CreateOAuth2Token',
            _build_add_dpop_header_handler(private_key),
        )

        try:
            response = client.create_o_auth2_token(
                tokenInput={
                    'clientId': token['clientId'],
                    'refreshToken': token['refreshToken'],
                    'grantType': 'refresh_token',
                },
            )
        except client.exceptions.AccessDeniedException as e:
            # Map service error codes onto the specific login exceptions
            # callers are expected to handle.
            error_type = e.response.get('error', '')
            if error_type in ('TOKEN_EXPIRED', 'USER_CREDENTIALS_CHANGED'):
                raise LoginRefreshRequired() from e
            elif error_type == 'INSUFFICIENT_PERMISSIONS':
                raise LoginInsufficientPermissions() from e
            raise LoginError() from e

        if response is None or 'tokenOutput' not in response:
            raise LoginTokenLoadError(
                error_msg=(
                    "Unable to refresh access token due to an invalid service response. "
                    "Please try running 'aws login' again. If the issue persists, there "
                    "may be a temporary signin service problem."
                )
            )

        output = response.get('tokenOutput')

        # Convert the relative expiresIn (seconds) into an absolute UTC
        # timestamp for the cache entry.
        expires_timestamp = self._time_fetcher().astimezone(
            tzutc()
        ) + datetime.timedelta(seconds=output['expiresIn'])

        # Overwrite token with refreshed fields
        token.update(
            {
                'accessToken': {
                    'accessKeyId': output['accessToken']['accessKeyId'],
                    'secretAccessKey': output['accessToken'][
                        'secretAccessKey'
                    ],
                    'sessionToken': output['accessToken']['sessionToken'],
                    'accountId': token['accessToken']['accountId'],
                    'expiresAt': expires_timestamp.strftime(
                        '%Y-%m-%dT%H:%M:%SZ'
                    ),
                },
                'refreshToken': output['refreshToken'],
            }
        )
        self._token_loader.save_token(self._session_name, token)

        return self._token_to_credentials(token)

    @staticmethod
    def _token_to_credentials(token):
        # Map the cached accessToken fields onto the dict shape that
        # RefreshableCredentials consumes (see LoginProvider.load).
        return {
            'access_key': token['accessToken']['accessKeyId'],
            'secret_key': token['accessToken']['secretAccessKey'],
            'token': token['accessToken']['sessionToken'],
            'expiry_time': token['accessToken']['expiresAt'],
            'account_id': token['accessToken']['accountId'],
        }

    @staticmethod
    def _load_private_key(token):
        """Deserialize the cached PEM 'dpopKey' into a private key.

        :raises LoginTokenLoadError: If the key is absent or malformed.
        """
        if 'dpopKey' not in token:
            raise LoginTokenLoadError(
                error_msg='Private key not found in cached token.'
            )

        # Remove the PEM header and footer lines
        lines = token['dpopKey'].splitlines()
        content_lines = [
            line
            for line in lines
            if not line.startswith('-----BEGIN')
            and not line.startswith('-----END')
        ]

        # strip should handle the optional newline at the end as well
        contents = ''.join(content_lines).strip()

        try:
            return EC.new_key_from_der_data(base64.b64decode(contents))
        except ValueError as e:
            raise LoginTokenLoadError(
                error_msg='Unable to load private key from cached token.'
            ) from e
+
+
class LoginProvider(CredentialProvider):
    """Provides credentials from a cached 'aws login' session."""

    METHOD = 'login'

    def __init__(
        self,
        load_config,
        client_creator,
        profile_name,
        token_cache=None,
    ):
        super().__init__()
        self._token_cache = (
            JSONFileCache(get_login_token_cache_directory())
            if token_cache is None
            else token_cache
        )

        self._load_config = load_config
        self._client_creator = client_creator
        self._profile_name = profile_name
        self._feature_ids = {'CREDENTIALS_PROFILE_LOGIN', 'CREDENTIALS_LOGIN'}

    def load(self):
        profile_config = (
            self._load_config()
            .get('profiles', {})
            .get(self._profile_name, {})
        )

        if 'login_session' not in profile_config:
            # Profile is not login-based; let the chain continue.
            return None

        if EC is None:
            raise MissingDependencyException(
                msg=(
                    "Using the login credential provider requires an "
                    "additional dependency. You will need to pip install "
                    "\"botocore[crt]\" before proceeding."
                )
            )

        fetcher = LoginCredentialFetcher(
            session_name=profile_config['login_session'],
            token_loader=LoginTokenLoader(self._token_cache),
            client_creator=self._client_creator,
            time_fetcher=_local_now,
            feature_ids=self._feature_ids,
        )

        register_feature_ids(self._feature_ids)

        # Hand back the cached credentials even if already expired; the
        # refresh hook replaces them on first use past expiry.
        cached = fetcher.load_cached_credentials()

        return RefreshableCredentials(
            access_key=cached['access_key'],
            secret_key=cached['secret_key'],
            token=cached['token'],
            expiry_time=_parse_if_needed(cached['expiry_time']),
            account_id=cached['account_id'],
            method=self.METHOD,
            refresh_using=fetcher.refresh_credentials,
            time_fetcher=_local_now,
        )
diff --git a/py311/lib/python3.11/site-packages/botocore/crt/__init__.py b/py311/lib/python3.11/site-packages/botocore/crt/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..952ebf34cc37bde64e7fcd14a9b252a205429f47
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/crt/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+# A list of auth types supported by the signers in botocore/crt/auth.py. This
+# should always match the keys of botocore.crt.auth.CRT_AUTH_TYPE_MAPS. The
+# information is duplicated here so that it can be accessed in environments
+# where `awscrt` is not present and any import from botocore.crt.auth would
+# fail.
+# NOTE: when adding a new CRT signer, add its auth-type key here as well.
+CRT_SUPPORTED_AUTH_TYPES = (
+    'v4',
+    'v4-query',
+    'v4a',
+    's3v4',
+    's3v4-query',
+    's3v4a',
+    's3v4a-query',
+)
diff --git a/py311/lib/python3.11/site-packages/botocore/crt/auth.py b/py311/lib/python3.11/site-packages/botocore/crt/auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..e36730e07ee60c578ac731e7dd0db856bb4f44a2
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/crt/auth.py
@@ -0,0 +1,629 @@
+# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+from io import BytesIO
+
+from botocore.auth import (
+ SIGNED_HEADERS_BLACKLIST,
+ STREAMING_UNSIGNED_PAYLOAD_TRAILER,
+ UNSIGNED_PAYLOAD,
+ BaseSigner,
+ _get_body_as_dict,
+ _host_from_url,
+)
+from botocore.compat import (
+ HTTPHeaders,
+ awscrt,
+ get_current_datetime,
+ parse_qs,
+ urlsplit,
+ urlunsplit,
+)
+from botocore.exceptions import NoCredentialsError
+from botocore.useragent import register_feature_id
+from botocore.utils import percent_encode_sequence
+
+
+class CrtSigV4Auth(BaseSigner):
+    """SigV4 header-based request signer backed by the AWS CRT (awscrt)."""
+
+    REQUIRES_REGION = True
+    # Headers from a previous signing pass that must be stripped before
+    # re-signing (e.g. on retries).
+    _PRESIGNED_HEADERS_BLOCKLIST = [
+        'Authorization',
+        'X-Amz-Date',
+        'X-Amz-Content-SHA256',
+        'X-Amz-Security-Token',
+    ]
+    # Signature is carried in request headers; query-param subclasses
+    # override this.
+    _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_HEADERS
+    _USE_DOUBLE_URI_ENCODE = True
+    _SHOULD_NORMALIZE_URI_PATH = True
+
+    def __init__(self, credentials, service_name, region_name):
+        """Store the credentials and signing scope (service + region)."""
+        self.credentials = credentials
+        self._service_name = service_name
+        self._region_name = region_name
+        # No expiration for header signing; query-auth subclasses set this.
+        self._expiration_in_seconds = None
+
+    def _is_streaming_checksum_payload(self, request):
+        """Return True when a trailer-based request checksum is configured."""
+        checksum_context = request.context.get('checksum', {})
+        algorithm = checksum_context.get('request_algorithm')
+        return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer'
+
+    def add_auth(self, request):
+        """Sign ``request`` in place using the CRT SigV4 implementation.
+
+        :raises NoCredentialsError: when no credentials were provided.
+        """
+        if self.credentials is None:
+            raise NoCredentialsError()
+
+        datetime_now = get_current_datetime(remove_tzinfo=False)
+
+        # Use existing 'X-Amz-Content-SHA256' header if able
+        existing_sha256 = self._get_existing_sha256(request)
+
+        self._modify_request_before_signing(request)
+
+        credentials_provider = awscrt.auth.AwsCredentialsProvider.new_static(
+            access_key_id=self.credentials.access_key,
+            secret_access_key=self.credentials.secret_key,
+            session_token=self.credentials.token,
+        )
+
+        # Decide what payload value gets signed: the streaming-trailer
+        # sentinel, a precomputed/implicit SHA256, or UNSIGNED-PAYLOAD.
+        if self._is_streaming_checksum_payload(request):
+            explicit_payload = STREAMING_UNSIGNED_PAYLOAD_TRAILER
+        elif self._should_sha256_sign_payload(request):
+            if existing_sha256:
+                explicit_payload = existing_sha256
+            else:
+                explicit_payload = None  # to be calculated during signing
+        else:
+            explicit_payload = UNSIGNED_PAYLOAD
+
+        if self._should_add_content_sha256_header(explicit_payload):
+            body_header = (
+                awscrt.auth.AwsSignedBodyHeaderType.X_AMZ_CONTENT_SHA_256
+            )
+        else:
+            body_header = awscrt.auth.AwsSignedBodyHeaderType.NONE
+
+        signing_config = awscrt.auth.AwsSigningConfig(
+            algorithm=awscrt.auth.AwsSigningAlgorithm.V4,
+            signature_type=self._SIGNATURE_TYPE,
+            credentials_provider=credentials_provider,
+            region=self._region_name,
+            service=self._service_name,
+            date=datetime_now,
+            should_sign_header=self._should_sign_header,
+            use_double_uri_encode=self._USE_DOUBLE_URI_ENCODE,
+            should_normalize_uri_path=self._SHOULD_NORMALIZE_URI_PATH,
+            signed_body_value=explicit_payload,
+            signed_body_header_type=body_header,
+            expiration_in_seconds=self._expiration_in_seconds,
+        )
+        # CRT signs a copy of the request; block on the async result, then
+        # copy the signed pieces back onto the AWSRequest.
+        crt_request = self._crt_request_from_aws_request(request)
+        future = awscrt.auth.aws_sign_request(crt_request, signing_config)
+        future.result()
+        self._apply_signing_changes(request, crt_request)
+
+    def _crt_request_from_aws_request(self, aws_request):
+        """Translate an AWSRequest into an awscrt.http.HttpRequest."""
+        url_parts = urlsplit(aws_request.url)
+        crt_path = url_parts.path if url_parts.path else '/'
+        # Explicit params take precedence over any query already in the URL.
+        if aws_request.params:
+            array = []
+            for param, value in aws_request.params.items():
+                value = str(value)
+                array.append(f'{param}={value}')
+            crt_path = crt_path + '?' + '&'.join(array)
+        elif url_parts.query:
+            crt_path = f'{crt_path}?{url_parts.query}'
+
+        crt_headers = awscrt.http.HttpHeaders(aws_request.headers.items())
+
+        # CRT requires body (if it exists) to be an I/O stream.
+        crt_body_stream = None
+        if aws_request.body:
+            if hasattr(aws_request.body, 'seek'):
+                crt_body_stream = aws_request.body
+            else:
+                crt_body_stream = BytesIO(aws_request.body)
+
+        crt_request = awscrt.http.HttpRequest(
+            method=aws_request.method,
+            path=crt_path,
+            headers=crt_headers,
+            body_stream=crt_body_stream,
+        )
+        return crt_request
+
+    def _apply_signing_changes(self, aws_request, signed_crt_request):
+        # Apply changes from signed CRT request to the AWSRequest
+        aws_request.headers = HTTPHeaders.from_pairs(
+            list(signed_crt_request.headers)
+        )
+
+    def _should_sign_header(self, name, **kwargs):
+        """Exclude blacklisted (e.g. hop-by-hop) headers from the signature."""
+        return name.lower() not in SIGNED_HEADERS_BLACKLIST
+
+    def _modify_request_before_signing(self, request):
+        # This could be a retry. Make sure the previous
+        # authorization headers are removed first.
+        for h in self._PRESIGNED_HEADERS_BLOCKLIST:
+            if h in request.headers:
+                del request.headers[h]
+        # If necessary, add the host header
+        if 'host' not in request.headers:
+            request.headers['host'] = _host_from_url(request.url)
+
+    def _get_existing_sha256(self, request):
+        """Return a caller-supplied payload hash header, if present."""
+        return request.headers.get('X-Amz-Content-SHA256')
+
+    def _should_sha256_sign_payload(self, request):
+        # Payloads will always be signed over insecure connections.
+        if not request.url.startswith('https'):
+            return True
+
+        # Certain operations may have payload signing disabled by default.
+        # Since we don't have access to the operation model, we pass in this
+        # bit of metadata through the request context.
+        return request.context.get('payload_signing_enabled', True)
+
+    def _should_add_content_sha256_header(self, explicit_payload):
+        # only add X-Amz-Content-SHA256 header if payload is explicitly set
+        return explicit_payload is not None
+
+
+class CrtS3SigV4Auth(CrtSigV4Auth):
+    """S3 variant of CRT SigV4 header signing (no URI normalization)."""
+
+    # For S3, we do not normalize the path.
+    _USE_DOUBLE_URI_ENCODE = False
+    _SHOULD_NORMALIZE_URI_PATH = False
+
+    def _get_existing_sha256(self, request):
+        # always recalculate
+        return None
+
+    def _should_sha256_sign_payload(self, request):
+        # S3 allows optional body signing, so to minimize the performance
+        # impact, we opt to not SHA256 sign the body on streaming uploads,
+        # provided that we're on https.
+        client_config = request.context.get('client_config')
+        s3_config = getattr(client_config, 's3', None)
+
+        # The config could be None if it isn't set, or if the customer sets it
+        # to None.
+        if s3_config is None:
+            s3_config = {}
+
+        # The explicit configuration takes precedence over any implicit
+        # configuration.
+        sign_payload = s3_config.get('payload_signing_enabled', None)
+        if sign_payload is not None:
+            return sign_payload
+
+        # We require that both a checksum be present and https be enabled
+        # to implicitly disable body signing. The combination of TLS and
+        # a checksum is sufficiently secure and durable for us to be
+        # confident in the request without body signing.
+        # Prefer a configured header-based checksum; Content-MD5 otherwise.
+        checksum_header = 'Content-MD5'
+        checksum_context = request.context.get('checksum', {})
+        algorithm = checksum_context.get('request_algorithm')
+        if isinstance(algorithm, dict) and algorithm.get('in') == 'header':
+            checksum_header = algorithm['name']
+        if (
+            not request.url.startswith('https')
+            or checksum_header not in request.headers
+        ):
+            return True
+
+        # If the input is streaming we disable body signing by default.
+        if request.context.get('has_streaming_input', False):
+            return False
+
+        # If the S3-specific checks had no results, delegate to the generic
+        # checks.
+        return super()._should_sha256_sign_payload(request)
+
+    def _should_add_content_sha256_header(self, explicit_payload):
+        # Always add X-Amz-Content-SHA256 header
+        return True
+
+
+class CrtSigV4AsymAuth(BaseSigner):
+    """SigV4A (asymmetric) header-based signer backed by the AWS CRT."""
+
+    REQUIRES_REGION = True
+    # Headers from a previous signing pass that must be stripped before
+    # re-signing (e.g. on retries).
+    _PRESIGNED_HEADERS_BLOCKLIST = [
+        'Authorization',
+        'X-Amz-Date',
+        'X-Amz-Content-SHA256',
+        'X-Amz-Security-Token',
+    ]
+    # Signature is carried in request headers; query-param subclasses
+    # override this.
+    _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_HEADERS
+    _USE_DOUBLE_URI_ENCODE = True
+    _SHOULD_NORMALIZE_URI_PATH = True
+
+    def __init__(self, credentials, service_name, region_name):
+        """Store the credentials and signing scope (service + region)."""
+        self.credentials = credentials
+        self._service_name = service_name
+        self._region_name = region_name
+        # No expiration for header signing; query-auth subclasses set this.
+        self._expiration_in_seconds = None
+
+    def add_auth(self, request):
+        """Sign ``request`` in place using CRT SigV4A.
+
+        :raises NoCredentialsError: when no credentials were provided.
+        """
+        # Record SigV4A usage for user-agent metrics.
+        register_feature_id("SIGV4A_SIGNING")
+        if self.credentials is None:
+            raise NoCredentialsError()
+
+        datetime_now = get_current_datetime(remove_tzinfo=False)
+
+        # Use existing 'X-Amz-Content-SHA256' header if able
+        existing_sha256 = self._get_existing_sha256(request)
+
+        self._modify_request_before_signing(request)
+
+        credentials_provider = awscrt.auth.AwsCredentialsProvider.new_static(
+            access_key_id=self.credentials.access_key,
+            secret_access_key=self.credentials.secret_key,
+            session_token=self.credentials.token,
+        )
+
+        # Decide what payload value gets signed: the streaming-trailer
+        # sentinel, a precomputed/implicit SHA256, or UNSIGNED-PAYLOAD.
+        if self._is_streaming_checksum_payload(request):
+            explicit_payload = STREAMING_UNSIGNED_PAYLOAD_TRAILER
+        elif self._should_sha256_sign_payload(request):
+            if existing_sha256:
+                explicit_payload = existing_sha256
+            else:
+                explicit_payload = None  # to be calculated during signing
+        else:
+            explicit_payload = UNSIGNED_PAYLOAD
+
+        if self._should_add_content_sha256_header(explicit_payload):
+            body_header = (
+                awscrt.auth.AwsSignedBodyHeaderType.X_AMZ_CONTENT_SHA_256
+            )
+        else:
+            body_header = awscrt.auth.AwsSignedBodyHeaderType.NONE
+
+        signing_config = awscrt.auth.AwsSigningConfig(
+            algorithm=awscrt.auth.AwsSigningAlgorithm.V4_ASYMMETRIC,
+            signature_type=self._SIGNATURE_TYPE,
+            credentials_provider=credentials_provider,
+            region=self._region_name,
+            service=self._service_name,
+            date=datetime_now,
+            should_sign_header=self._should_sign_header,
+            use_double_uri_encode=self._USE_DOUBLE_URI_ENCODE,
+            should_normalize_uri_path=self._SHOULD_NORMALIZE_URI_PATH,
+            signed_body_value=explicit_payload,
+            signed_body_header_type=body_header,
+            expiration_in_seconds=self._expiration_in_seconds,
+        )
+        # CRT signs a copy of the request; block on the async result, then
+        # copy the signed pieces back onto the AWSRequest.
+        crt_request = self._crt_request_from_aws_request(request)
+        future = awscrt.auth.aws_sign_request(crt_request, signing_config)
+        future.result()
+        self._apply_signing_changes(request, crt_request)
+
+    def _crt_request_from_aws_request(self, aws_request):
+        """Translate an AWSRequest into an awscrt.http.HttpRequest."""
+        url_parts = urlsplit(aws_request.url)
+        crt_path = url_parts.path if url_parts.path else '/'
+        # Explicit params take precedence over any query already in the URL.
+        if aws_request.params:
+            array = []
+            for param, value in aws_request.params.items():
+                value = str(value)
+                array.append(f'{param}={value}')
+            crt_path = crt_path + '?' + '&'.join(array)
+        elif url_parts.query:
+            crt_path = f'{crt_path}?{url_parts.query}'
+
+        crt_headers = awscrt.http.HttpHeaders(aws_request.headers.items())
+
+        # CRT requires body (if it exists) to be an I/O stream.
+        crt_body_stream = None
+        if aws_request.body:
+            if hasattr(aws_request.body, 'seek'):
+                crt_body_stream = aws_request.body
+            else:
+                crt_body_stream = BytesIO(aws_request.body)
+
+        crt_request = awscrt.http.HttpRequest(
+            method=aws_request.method,
+            path=crt_path,
+            headers=crt_headers,
+            body_stream=crt_body_stream,
+        )
+        return crt_request
+
+    def _apply_signing_changes(self, aws_request, signed_crt_request):
+        # Apply changes from signed CRT request to the AWSRequest
+        aws_request.headers = HTTPHeaders.from_pairs(
+            list(signed_crt_request.headers)
+        )
+
+    def _should_sign_header(self, name, **kwargs):
+        """Exclude blacklisted (e.g. hop-by-hop) headers from the signature."""
+        return name.lower() not in SIGNED_HEADERS_BLACKLIST
+
+    def _modify_request_before_signing(self, request):
+        # This could be a retry. Make sure the previous
+        # authorization headers are removed first.
+        for h in self._PRESIGNED_HEADERS_BLOCKLIST:
+            if h in request.headers:
+                del request.headers[h]
+        # If necessary, add the host header
+        if 'host' not in request.headers:
+            request.headers['host'] = _host_from_url(request.url)
+
+    def _get_existing_sha256(self, request):
+        """Return a caller-supplied payload hash header, if present."""
+        return request.headers.get('X-Amz-Content-SHA256')
+
+    def _is_streaming_checksum_payload(self, request):
+        """Return True when a trailer-based request checksum is configured."""
+        checksum_context = request.context.get('checksum', {})
+        algorithm = checksum_context.get('request_algorithm')
+        return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer'
+
+    def _should_sha256_sign_payload(self, request):
+        # Payloads will always be signed over insecure connections.
+        if not request.url.startswith('https'):
+            return True
+
+        # Certain operations may have payload signing disabled by default.
+        # Since we don't have access to the operation model, we pass in this
+        # bit of metadata through the request context.
+        return request.context.get('payload_signing_enabled', True)
+
+    def _should_add_content_sha256_header(self, explicit_payload):
+        # only add X-Amz-Content-SHA256 header if payload is explicitly set
+        return explicit_payload is not None
+
+
+class CrtS3SigV4AsymAuth(CrtSigV4AsymAuth):
+    """S3 variant of CRT SigV4A header signing (no URI normalization)."""
+
+    # For S3, we do not normalize the path.
+    _USE_DOUBLE_URI_ENCODE = False
+    _SHOULD_NORMALIZE_URI_PATH = False
+
+    def _get_existing_sha256(self, request):
+        # always recalculate
+        return None
+
+    def _should_sha256_sign_payload(self, request):
+        # S3 allows optional body signing, so to minimize the performance
+        # impact, we opt to not SHA256 sign the body on streaming uploads,
+        # provided that we're on https.
+        client_config = request.context.get('client_config')
+        s3_config = getattr(client_config, 's3', None)
+
+        # The config could be None if it isn't set, or if the customer sets it
+        # to None.
+        if s3_config is None:
+            s3_config = {}
+
+        # The explicit configuration takes precedence over any implicit
+        # configuration.
+        sign_payload = s3_config.get('payload_signing_enabled', None)
+        if sign_payload is not None:
+            return sign_payload
+
+        # We require that both content-md5 be present and https be enabled
+        # to implicitly disable body signing. The combination of TLS and
+        # content-md5 is sufficiently secure and durable for us to be
+        # confident in the request without body signing.
+        # NOTE(review): unlike CrtS3SigV4Auth, this only checks Content-MD5
+        # and not a configured header checksum algorithm — confirm this
+        # asymmetry is intentional.
+        if (
+            not request.url.startswith('https')
+            or 'Content-MD5' not in request.headers
+        ):
+            return True
+
+        # If the input is streaming we disable body signing by default.
+        if request.context.get('has_streaming_input', False):
+            return False
+
+        # If the S3-specific checks had no results, delegate to the generic
+        # checks.
+        return super()._should_sha256_sign_payload(request)
+
+    def _should_add_content_sha256_header(self, explicit_payload):
+        # Always add X-Amz-Content-SHA256 header
+        return True
+
+
+class CrtSigV4AsymQueryAuth(CrtSigV4AsymAuth):
+    """SigV4A signer that places the signature in query parameters
+    (presigned URLs)."""
+
+    # Default lifetime of the presigned URL, in seconds.
+    DEFAULT_EXPIRES = 3600
+    _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS
+
+    def __init__(
+        self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES
+    ):
+        """Like the parent, plus ``expires``: URL validity in seconds."""
+        super().__init__(credentials, service_name, region_name)
+        self._expiration_in_seconds = expires
+
+    def _modify_request_before_signing(self, request):
+        super()._modify_request_before_signing(request)
+
+        # We automatically set this header, so if it's the auto-set value we
+        # want to get rid of it since it doesn't make sense for presigned urls.
+        content_type = request.headers.get('content-type')
+        if content_type == 'application/x-www-form-urlencoded; charset=utf-8':
+            del request.headers['content-type']
+
+        # Now parse the original query string to a dict, inject our new query
+        # params, and serialize back to a query string.
+        url_parts = urlsplit(request.url)
+        # parse_qs makes each value a list, but in our case we know we won't
+        # have repeated keys so we know we have single element lists which we
+        # can convert back to scalar values.
+        query_string_parts = parse_qs(url_parts.query, keep_blank_values=True)
+        query_dict = {k: v[0] for k, v in query_string_parts.items()}
+
+        # NOTE(review): CrtSigV4QueryAuth also merges request.params here;
+        # this class does not — confirm params are already folded into the
+        # URL for this code path.
+        # The spec is particular about this. It *has* to be:
+        # https://?&
+        # You can't mix the two types of params together, i.e just keep doing
+        # new_query_params.update(op_params)
+        # new_query_params.update(auth_params)
+        # percent_encode_sequence(new_query_params)
+        if request.data:
+            # We also need to move the body params into the query string. To
+            # do this, we first have to convert it to a dict.
+            query_dict.update(_get_body_as_dict(request))
+            request.data = ''
+        new_query_string = percent_encode_sequence(query_dict)
+        # url_parts is a tuple (and therefore immutable) so we need to create
+        # a new url_parts with the new query string.
+        # -
+        # scheme - 0
+        # netloc - 1
+        # path - 2
+        # query - 3 <-- we're replacing this.
+        # fragment - 4
+        p = url_parts
+        new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
+        request.url = urlunsplit(new_url_parts)
+
+    def _apply_signing_changes(self, aws_request, signed_crt_request):
+        # Apply changes from signed CRT request to the AWSRequest
+        super()._apply_signing_changes(aws_request, signed_crt_request)
+
+        # Copy the signed query string (which now carries the signature)
+        # from the CRT request's path back onto the AWSRequest URL.
+        signed_query = urlsplit(signed_crt_request.path).query
+        p = urlsplit(aws_request.url)
+        # urlsplit() returns a tuple (and therefore immutable) so we
+        # need to create new url with the new query string.
+        # -
+        # scheme - 0
+        # netloc - 1
+        # path - 2
+        # query - 3 <-- we're replacing this.
+        # fragment - 4
+        aws_request.url = urlunsplit((p[0], p[1], p[2], signed_query, p[4]))
+
+
+class CrtS3SigV4AsymQueryAuth(CrtSigV4AsymQueryAuth):
+    """S3 SigV4A auth using query parameters.
+
+    This signer will sign a request using query parameters and signature
+    version 4A, i.e a "presigned url" signer.
+    """
+
+    # For S3, we do not normalize the path.
+    _USE_DOUBLE_URI_ENCODE = False
+    _SHOULD_NORMALIZE_URI_PATH = False
+
+    def _should_sha256_sign_payload(self, request):
+        # From the doc link above:
+        # "You don't include a payload hash in the Canonical Request, because
+        # when you create a presigned URL, you don't know anything about the
+        # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD".
+        return False
+
+    def _should_add_content_sha256_header(self, explicit_payload):
+        # Never add X-Amz-Content-SHA256 header
+        return False
+
+
+class CrtSigV4QueryAuth(CrtSigV4Auth):
+    """SigV4 signer that places the signature in query parameters
+    (presigned URLs)."""
+
+    # Default lifetime of the presigned URL, in seconds.
+    DEFAULT_EXPIRES = 3600
+    _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS
+
+    def __init__(
+        self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES
+    ):
+        """Like the parent, plus ``expires``: URL validity in seconds."""
+        super().__init__(credentials, service_name, region_name)
+        self._expiration_in_seconds = expires
+
+    def _modify_request_before_signing(self, request):
+        super()._modify_request_before_signing(request)
+
+        # We automatically set this header, so if it's the auto-set value we
+        # want to get rid of it since it doesn't make sense for presigned urls.
+        content_type = request.headers.get('content-type')
+        if content_type == 'application/x-www-form-urlencoded; charset=utf-8':
+            del request.headers['content-type']
+
+        # Now parse the original query string to a dict, inject our new query
+        # params, and serialize back to a query string.
+        url_parts = urlsplit(request.url)
+        # parse_qs makes each value a list, but in our case we know we won't
+        # have repeated keys so we know we have single element lists which we
+        # can convert back to scalar values.
+        query_dict = {
+            k: v[0]
+            for k, v in parse_qs(
+                url_parts.query, keep_blank_values=True
+            ).items()
+        }
+        # Fold any explicit request params into the query string as well.
+        if request.params:
+            query_dict.update(request.params)
+            request.params = {}
+        # The spec is particular about this. It *has* to be:
+        # https://?&
+        # You can't mix the two types of params together, i.e just keep doing
+        # new_query_params.update(op_params)
+        # new_query_params.update(auth_params)
+        # percent_encode_sequence(new_query_params)
+        if request.data:
+            # We also need to move the body params into the query string. To
+            # do this, we first have to convert it to a dict.
+            query_dict.update(_get_body_as_dict(request))
+            request.data = ''
+        new_query_string = percent_encode_sequence(query_dict)
+        # url_parts is a tuple (and therefore immutable) so we need to create
+        # a new url_parts with the new query string.
+        # -
+        # scheme - 0
+        # netloc - 1
+        # path - 2
+        # query - 3 <-- we're replacing this.
+        # fragment - 4
+        p = url_parts
+        new_url_parts = (p[0], p[1], p[2], new_query_string, p[4])
+        request.url = urlunsplit(new_url_parts)
+
+    def _apply_signing_changes(self, aws_request, signed_crt_request):
+        # Apply changes from signed CRT request to the AWSRequest
+        super()._apply_signing_changes(aws_request, signed_crt_request)
+
+        # Copy the signed query string (which now carries the signature)
+        # from the CRT request's path back onto the AWSRequest URL.
+        signed_query = urlsplit(signed_crt_request.path).query
+        p = urlsplit(aws_request.url)
+        # urlsplit() returns a tuple (and therefore immutable) so we
+        # need to create new url with the new query string.
+        # -
+        # scheme - 0
+        # netloc - 1
+        # path - 2
+        # query - 3 <-- we're replacing this.
+        # fragment - 4
+        aws_request.url = urlunsplit((p[0], p[1], p[2], signed_query, p[4]))
+
+
+class CrtS3SigV4QueryAuth(CrtSigV4QueryAuth):
+    """S3 SigV4 auth using query parameters.
+
+    This signer will sign a request using query parameters and signature
+    version 4, i.e a "presigned url" signer.
+    Based off of:
+    http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
+    """
+
+    # For S3, we do not normalize the path.
+    _USE_DOUBLE_URI_ENCODE = False
+    _SHOULD_NORMALIZE_URI_PATH = False
+
+    def _should_sha256_sign_payload(self, request):
+        # From the doc link above:
+        # "You don't include a payload hash in the Canonical Request, because
+        # when you create a presigned URL, you don't know anything about the
+        # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD".
+        return False
+
+    def _should_add_content_sha256_header(self, explicit_payload):
+        # Never add X-Amz-Content-SHA256 header
+        return False
+
+
+# Defined at the bottom of module to ensure all Auth
+# classes are defined.
+# Keys must stay in sync with botocore.crt.CRT_SUPPORTED_AUTH_TYPES.
+CRT_AUTH_TYPE_MAPS = {
+    'v4': CrtSigV4Auth,
+    'v4-query': CrtSigV4QueryAuth,
+    'v4a': CrtSigV4AsymAuth,
+    's3v4': CrtS3SigV4Auth,
+    's3v4-query': CrtS3SigV4QueryAuth,
+    's3v4a': CrtS3SigV4AsymAuth,
+    's3v4a-query': CrtS3SigV4AsymQueryAuth,
+}
diff --git a/py311/lib/python3.11/site-packages/botocore/discovery.py b/py311/lib/python3.11/site-packages/botocore/discovery.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d16e97dd1f66e1db5c871adf990310676210f86
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/discovery.py
@@ -0,0 +1,282 @@
+# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import logging
+import time
+import weakref
+
+from botocore import xform_name
+from botocore.exceptions import BotoCoreError, ConnectionError, HTTPClientError
+from botocore.model import OperationNotFoundError
+from botocore.utils import CachedProperty
+
+# Module-level logger for endpoint discovery diagnostics.
+logger = logging.getLogger(__name__)
+
+
+class EndpointDiscoveryException(BotoCoreError):
+    """Base class for all endpoint discovery errors."""
+
+    pass
+
+
+class EndpointDiscoveryRequired(EndpointDiscoveryException):
+    """Endpoint Discovery is disabled but is required for this operation."""
+
+    # Message template consumed by the BotoCoreError base class.
+    fmt = 'Endpoint Discovery is not enabled but this operation requires it.'
+
+
+class EndpointDiscoveryRefreshFailed(EndpointDiscoveryException):
+    """Endpoint Discovery failed to refresh the known endpoints."""
+
+    # Message template consumed by the BotoCoreError base class.
+    fmt = 'Endpoint Discovery failed to refresh the required endpoints.'
+
+
+def block_endpoint_discovery_required_operations(model, **kwargs):
+    """Raise if the operation model marks endpoint discovery as required.
+
+    Event-handler signature (operation ``model`` plus event kwargs);
+    raises ``EndpointDiscoveryRequired`` so required-discovery operations
+    fail fast when discovery is not enabled.
+    """
+    endpoint_discovery = model.endpoint_discovery
+    if endpoint_discovery and endpoint_discovery.get('required'):
+        raise EndpointDiscoveryRequired()
+
+
+class EndpointDiscoveryModel:
+    """View over a service model's endpoint discovery metadata."""
+
+    def __init__(self, service_model):
+        self._service_model = service_model
+
+    @CachedProperty
+    def discovery_operation_name(self):
+        """Snake-cased name of the service's discovery operation."""
+        discovery_operation = self._service_model.endpoint_discovery_operation
+        return xform_name(discovery_operation.name)
+
+    @CachedProperty
+    def discovery_operation_keys(self):
+        """Input member names accepted by the discovery operation."""
+        discovery_operation = self._service_model.endpoint_discovery_operation
+        keys = []
+        if discovery_operation.input_shape:
+            keys = list(discovery_operation.input_shape.members.keys())
+        return keys
+
+    def discovery_required_for(self, operation_name):
+        """Return True if ``operation_name`` requires endpoint discovery."""
+        try:
+            operation_model = self._service_model.operation_model(
+                operation_name
+            )
+            return operation_model.endpoint_discovery.get('required', False)
+        except OperationNotFoundError:
+            # Unknown operations cannot require discovery.
+            return False
+
+    def discovery_operation_kwargs(self, **kwargs):
+        """Filter kwargs down to what the discovery operation accepts."""
+        input_keys = self.discovery_operation_keys
+        # Operation and Identifiers are only sent if there are Identifiers
+        if not kwargs.get('Identifiers'):
+            kwargs.pop('Operation', None)
+            kwargs.pop('Identifiers', None)
+        return {k: v for k, v in kwargs.items() if k in input_keys}
+
+    def gather_identifiers(self, operation, params):
+        """Collect endpoint discovery ids from ``params`` for ``operation``."""
+        return self._gather_ids(operation.input_shape, params)
+
+    def _gather_ids(self, shape, params, ids=None):
+        # Traverse the input shape and corresponding parameters, gathering
+        # any input fields labeled as an endpoint discovery id
+        if ids is None:
+            ids = {}
+        for member_name, member_shape in shape.members.items():
+            if member_shape.metadata.get('endpointdiscoveryid'):
+                ids[member_name] = params[member_name]
+            elif (
+                member_shape.type_name == 'structure' and member_name in params
+            ):
+                # Recurse into nested structures that are present in params.
+                self._gather_ids(member_shape, params[member_name], ids)
+        return ids
+
+
+class EndpointDiscoveryManager:
+    """Caches and refreshes discovered endpoints for a client."""
+
+    def __init__(
+        self, client, cache=None, current_time=None, always_discover=True
+    ):
+        """
+        :param client: The botocore client used to call the service's
+            discovery operation.
+        :param cache: Optional mapping used as the endpoint cache.
+        :param current_time: Optional clock callable (defaults to time.time);
+            injectable for testing.
+        :param always_discover: When False, discovery only runs for
+            operations that require it.
+        """
+        if cache is None:
+            cache = {}
+        self._cache = cache
+        # cache_key -> timestamp until which refresh attempts are skipped.
+        self._failed_attempts = {}
+        if current_time is None:
+            current_time = time.time
+        self._time = current_time
+        self._always_discover = always_discover
+
+        # This needs to be a weak ref in order to prevent memory leaks on
+        # python 2.6
+        self._client = weakref.proxy(client)
+        self._model = EndpointDiscoveryModel(client.meta.service_model)
+
+    def _parse_endpoints(self, response):
+        """Annotate each endpoint with an absolute 'Expiration' timestamp."""
+        endpoints = response['Endpoints']
+        current_time = self._time()
+        for endpoint in endpoints:
+            # NOTE(review): assumes 'CachePeriodInMinutes' is present; a
+            # missing key would make cache_time None and raise TypeError
+            # below — confirm against the discovery response shape.
+            cache_time = endpoint.get('CachePeriodInMinutes')
+            endpoint['Expiration'] = current_time + cache_time * 60
+        return endpoints
+
+    def _cache_item(self, value):
+        # Dicts are unhashable; convert to a sorted tuple of items.
+        if isinstance(value, dict):
+            return tuple(sorted(value.items()))
+        else:
+            return value
+
+    def _create_cache_key(self, **kwargs):
+        """Build a hashable cache key from the discovery kwargs."""
+        kwargs = self._model.discovery_operation_kwargs(**kwargs)
+        return tuple(self._cache_item(v) for k, v in sorted(kwargs.items()))
+
+    def gather_identifiers(self, operation, params):
+        """Delegate identifier collection to the discovery model."""
+        return self._model.gather_identifiers(operation, params)
+
+    def delete_endpoints(self, **kwargs):
+        """Evict the cached endpoints matching ``kwargs``, if any."""
+        cache_key = self._create_cache_key(**kwargs)
+        if cache_key in self._cache:
+            del self._cache[cache_key]
+
+    def _describe_endpoints(self, **kwargs):
+        # This is effectively a proxy to whatever name/kwargs the service
+        # supports for endpoint discovery.
+        kwargs = self._model.discovery_operation_kwargs(**kwargs)
+        operation_name = self._model.discovery_operation_name
+        discovery_operation = getattr(self._client, operation_name)
+        logger.debug('Discovering endpoints with kwargs: %s', kwargs)
+        return discovery_operation(**kwargs)
+
+    def _get_current_endpoints(self, key):
+        """Return unexpired cached endpoints for ``key``, or None."""
+        if key not in self._cache:
+            return None
+        now = self._time()
+        return [e for e in self._cache[key] if now < e['Expiration']]
+
+    def _refresh_current_endpoints(self, **kwargs):
+        """Call discovery and cache the result; None on network failure."""
+        cache_key = self._create_cache_key(**kwargs)
+        try:
+            response = self._describe_endpoints(**kwargs)
+            endpoints = self._parse_endpoints(response)
+            self._cache[cache_key] = endpoints
+            # A successful refresh clears any recorded failure backoff.
+            self._failed_attempts.pop(cache_key, None)
+            return endpoints
+        except (ConnectionError, HTTPClientError):
+            # Back off further refreshes for this key for 60 seconds.
+            self._failed_attempts[cache_key] = self._time() + 60
+            return None
+
+    def _recently_failed(self, cache_key):
+        """True while ``cache_key`` is inside its failure backoff window."""
+        if cache_key in self._failed_attempts:
+            now = self._time()
+            if now < self._failed_attempts[cache_key]:
+                return True
+            # Backoff expired; forget the failure.
+            del self._failed_attempts[cache_key]
+        return False
+
+    def _select_endpoint(self, endpoints):
+        # Currently always picks the first endpoint returned.
+        return endpoints[0]['Address']
+
+    def describe_endpoint(self, **kwargs):
+        """Resolve an endpoint address for the given Operation/Identifiers.
+
+        Returns a cached (possibly stale) address when possible, refreshing
+        as needed; returns None when discovery is optional and unavailable.
+        Raises EndpointDiscoveryRefreshFailed when discovery is required
+        but no endpoint can be obtained.
+        """
+        operation = kwargs['Operation']
+        discovery_required = self._model.discovery_required_for(operation)
+
+        if not self._always_discover and not discovery_required:
+            # Discovery set to only run on required operations
+            logger.debug(
+                'Optional discovery disabled. Skipping discovery for Operation: %s',
+                operation,
+            )
+            return None
+
+        # Get the endpoint for the provided operation and identifiers
+        cache_key = self._create_cache_key(**kwargs)
+        endpoints = self._get_current_endpoints(cache_key)
+        if endpoints:
+            return self._select_endpoint(endpoints)
+        # All known endpoints are stale
+        recently_failed = self._recently_failed(cache_key)
+        if not recently_failed:
+            # We haven't failed to discover recently, go ahead and refresh
+            endpoints = self._refresh_current_endpoints(**kwargs)
+            if endpoints:
+                return self._select_endpoint(endpoints)
+        # Discovery has failed recently, do our best to get an endpoint
+        logger.debug('Endpoint Discovery has failed for: %s', kwargs)
+        stale_entries = self._cache.get(cache_key, None)
+        if stale_entries:
+            # We have stale entries, use those while discovery is failing
+            return self._select_endpoint(stale_entries)
+        if discovery_required:
+            # It looks strange to be checking recently_failed again but,
+            # this informs us as to whether or not we tried to refresh earlier
+            if recently_failed:
+                # Discovery is required and we haven't already refreshed
+                endpoints = self._refresh_current_endpoints(**kwargs)
+                if endpoints:
+                    return self._select_endpoint(endpoints)
+            # No endpoints even refresh, raise hard error
+            raise EndpointDiscoveryRefreshFailed()
+        # Discovery is optional, just use the default endpoint for now
+        return None
+
+
+class EndpointDiscoveryHandler:
+    """Wires an EndpointDiscoveryManager into the client event system."""
+
+    def __init__(self, manager):
+        self._manager = manager
+
+    def register(self, events, service_id):
+        """Register the discovery hooks for ``service_id`` on ``events``."""
+        events.register(
+            f'before-parameter-build.{service_id}', self.gather_identifiers
+        )
+        events.register_first(
+            f'request-created.{service_id}', self.discover_endpoint
+        )
+        events.register(f'needs-retry.{service_id}', self.handle_retries)
+
+    def gather_identifiers(self, params, model, context, **kwargs):
+        """Stash discovery identifiers in the request context."""
+        endpoint_discovery = model.endpoint_discovery
+        # Only continue if the operation supports endpoint discovery
+        if endpoint_discovery is None:
+            return
+        ids = self._manager.gather_identifiers(model, params)
+        context['discovery'] = {'identifiers': ids}
+
+    def discover_endpoint(self, request, operation_name, **kwargs):
+        """Rewrite ``request.url`` with a discovered endpoint, if any."""
+        ids = request.context.get('discovery', {}).get('identifiers')
+        if ids is None:
+            return
+        endpoint = self._manager.describe_endpoint(
+            Operation=operation_name, Identifiers=ids
+        )
+        if endpoint is None:
+            logger.debug('Failed to discover and inject endpoint')
+            return
+        # Discovered addresses may lack a scheme; default to https.
+        if not endpoint.startswith('http'):
+            endpoint = 'https://' + endpoint
+        logger.debug('Injecting discovered endpoint: %s', endpoint)
+        request.url = endpoint
+
+    def handle_retries(self, request_dict, response, operation, **kwargs):
+        """Evict stale endpoints on 421/InvalidEndpointException responses.
+
+        Returns 0 to request a retry with no delay, or None to leave the
+        retry decision to other handlers.
+        """
+        if response is None:
+            return None
+
+        # response is a (http_response, parsed) pair; only the parsed
+        # portion is inspected here.
+        _, response = response
+        status = response.get('ResponseMetadata', {}).get('HTTPStatusCode')
+        error_code = response.get('Error', {}).get('Code')
+        if status != 421 and error_code != 'InvalidEndpointException':
+            return None
+
+        context = request_dict.get('context', {})
+        ids = context.get('discovery', {}).get('identifiers')
+        if ids is None:
+            return None
+
+        # Delete the cached endpoints, forcing a refresh on retry
+        # TODO: Improve eviction behavior to only evict the bad endpoint if
+        # there are multiple. This will almost certainly require a lock.
+        self._manager.delete_endpoints(
+            Operation=operation.name, Identifiers=ids
+        )
+        return 0
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/__init__.py b/py311/lib/python3.11/site-packages/botocore/docs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..844f5de59b05a334d9142fdbf087d6870e94970d
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/__init__.py
@@ -0,0 +1,54 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+from botocore.docs.service import ServiceDocumenter
+
+DEPRECATED_SERVICE_NAMES = {'sms-voice'}
+
+
+def generate_docs(root_dir, session):
+ """Generates the reference documentation for botocore
+
+ This will go through every available AWS service and output ReSTructured
+ text files documenting each service.
+
+ :param root_dir: The directory to write the reference files to. Each
+        service's reference documentation is located at
+ root_dir/reference/services/service-name.rst
+ """
+ # Create the root directory where all service docs live.
+ services_dir_path = os.path.join(root_dir, 'reference', 'services')
+ if not os.path.exists(services_dir_path):
+ os.makedirs(services_dir_path)
+
+ # Prevents deprecated service names from being generated in docs.
+ available_services = [
+ service
+ for service in session.get_available_services()
+ if service not in DEPRECATED_SERVICE_NAMES
+ ]
+
+ # Generate reference docs and write them out.
+ for service_name in available_services:
+ docs = ServiceDocumenter(
+ service_name, session, services_dir_path
+ ).document_service()
+
+ # Write the main service documentation page.
+        # Path: <root>/reference/services/<service>/index.rst
+ service_file_path = os.path.join(
+ services_dir_path, f'{service_name}.rst'
+ )
+ with open(service_file_path, 'wb') as f:
+ f.write(docs)
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/client.py b/py311/lib/python3.11/site-packages/botocore/docs/client.py
new file mode 100644
index 0000000000000000000000000000000000000000..41e37426ec8f8d713f52a8678763f6f77b61dac1
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/client.py
@@ -0,0 +1,453 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+from botocore import xform_name
+from botocore.compat import OrderedDict
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+from botocore.docs.example import ResponseExampleDocumenter
+from botocore.docs.method import (
+ document_custom_method,
+ document_model_driven_method,
+ get_instance_public_methods,
+)
+from botocore.docs.params import ResponseParamsDocumenter
+from botocore.docs.sharedexample import document_shared_examples
+from botocore.docs.utils import DocumentedShape, get_official_service_name
+
+
+def _allowlist_generate_presigned_url(method_name, service_name, **kwargs):
+ if method_name != 'generate_presigned_url':
+ return None
+ return service_name in ['s3']
+
+
+class ClientDocumenter:
+ _CLIENT_METHODS_FILTERS = [
+ _allowlist_generate_presigned_url,
+ ]
+
+ def __init__(self, client, root_docs_path, shared_examples=None):
+ self._client = client
+ self._client_class_name = self._client.__class__.__name__
+ self._root_docs_path = root_docs_path
+ self._shared_examples = shared_examples
+ if self._shared_examples is None:
+ self._shared_examples = {}
+ self._service_name = self._client.meta.service_model.service_name
+
+ def document_client(self, section):
+ """Documents a client and its methods
+
+ :param section: The section to write to.
+ """
+ self._add_title(section)
+ self._add_class_signature(section)
+ client_methods = self._get_client_methods()
+ self._add_client_intro(section, client_methods)
+ self._add_client_methods(client_methods)
+
+ def _get_client_methods(self):
+ client_methods = get_instance_public_methods(self._client)
+ return self._filter_client_methods(client_methods)
+
+ def _filter_client_methods(self, client_methods):
+ filtered_methods = {}
+ for method_name, method in client_methods.items():
+ include = self._filter_client_method(
+ method=method,
+ method_name=method_name,
+ service_name=self._service_name,
+ )
+ if include:
+ filtered_methods[method_name] = method
+ return filtered_methods
+
+ def _filter_client_method(self, **kwargs):
+ # Apply each filter to the method
+ for filter in self._CLIENT_METHODS_FILTERS:
+ filter_include = filter(**kwargs)
+ # Use the first non-None value returned by any of the filters
+ if filter_include is not None:
+ return filter_include
+ # Otherwise default to including it
+ return True
+
+ def _add_title(self, section):
+ section.style.h2('Client')
+
+ def _add_client_intro(self, section, client_methods):
+ section = section.add_new_section('intro')
+ # Write out the top level description for the client.
+ official_service_name = get_official_service_name(
+ self._client.meta.service_model
+ )
+ section.write(
+ f"A low-level client representing {official_service_name}"
+ )
+ section.style.new_line()
+ section.include_doc_string(
+ self._client.meta.service_model.documentation
+ )
+
+ # Write out the client example instantiation.
+ self._add_client_creation_example(section)
+
+ # List out all of the possible client methods.
+ section.style.dedent()
+ section.style.new_paragraph()
+ section.writeln('These are the available methods:')
+ section.style.toctree()
+ for method_name in sorted(client_methods):
+ section.style.tocitem(f'{self._service_name}/client/{method_name}')
+
+ def _add_class_signature(self, section):
+ section.style.start_sphinx_py_class(
+ class_name=f'{self._client_class_name}.Client'
+ )
+
+ def _add_client_creation_example(self, section):
+ section.style.start_codeblock()
+ section.style.new_line()
+ section.write(
+ f'client = session.create_client(\'{self._service_name}\')'
+ )
+ section.style.end_codeblock()
+
+ def _add_client_methods(self, client_methods):
+ for method_name in sorted(client_methods):
+ # Create a new DocumentStructure for each client method and add contents.
+ method_doc_structure = DocumentStructure(
+ method_name, target='html'
+ )
+ self._add_client_method(
+ method_doc_structure, method_name, client_methods[method_name]
+ )
+ # Write client methods in individual/nested files.
+            # Path: <root>/reference/services/<service>/client/<method_name>.rst
+ client_dir_path = os.path.join(
+ self._root_docs_path, self._service_name, 'client'
+ )
+ method_doc_structure.write_to_file(client_dir_path, method_name)
+
+ def _add_client_method(self, section, method_name, method):
+ breadcrumb_section = section.add_new_section('breadcrumb')
+ breadcrumb_section.style.ref(
+ self._client_class_name, f'../../{self._service_name}'
+ )
+ breadcrumb_section.write(f' / Client / {method_name}')
+ section.add_title_section(method_name)
+ method_section = section.add_new_section(
+ method_name,
+ context={'qualifier': f'{self._client_class_name}.Client.'},
+ )
+ if self._is_custom_method(method_name):
+ self._add_custom_method(
+ method_section,
+ method_name,
+ method,
+ )
+ else:
+ self._add_model_driven_method(method_section, method_name)
+
+ def _is_custom_method(self, method_name):
+ return method_name not in self._client.meta.method_to_api_mapping
+
+ def _add_custom_method(self, section, method_name, method):
+ document_custom_method(section, method_name, method)
+
+ def _add_method_exceptions_list(self, section, operation_model):
+ error_section = section.add_new_section('exceptions')
+ error_section.style.new_line()
+ error_section.style.bold('Exceptions')
+ error_section.style.new_line()
+ for error in operation_model.error_shapes:
+ class_name = (
+ f'{self._client_class_name}.Client.exceptions.{error.name}'
+ )
+ error_section.style.li(f':py:class:`{class_name}`')
+
+ def _add_model_driven_method(self, section, method_name):
+ service_model = self._client.meta.service_model
+ operation_name = self._client.meta.method_to_api_mapping[method_name]
+ operation_model = service_model.operation_model(operation_name)
+
+ example_prefix = f'response = client.{method_name}'
+ full_method_name = (
+ f"{section.context.get('qualifier', '')}{method_name}"
+ )
+ document_model_driven_method(
+ section,
+ full_method_name,
+ operation_model,
+ event_emitter=self._client.meta.events,
+ method_description=operation_model.documentation,
+ example_prefix=example_prefix,
+ )
+
+ # Add any modeled exceptions
+ if operation_model.error_shapes:
+ self._add_method_exceptions_list(section, operation_model)
+
+ # Add the shared examples
+ shared_examples = self._shared_examples.get(operation_name)
+ if shared_examples:
+ document_shared_examples(
+ section, operation_model, example_prefix, shared_examples
+ )
+
+
+class ClientExceptionsDocumenter:
+ _USER_GUIDE_LINK = (
+ 'https://boto3.amazonaws.com/'
+ 'v1/documentation/api/latest/guide/error-handling.html'
+ )
+ _GENERIC_ERROR_SHAPE = DocumentedShape(
+ name='Error',
+ type_name='structure',
+ documentation=('Normalized access to common exception attributes.'),
+ members=OrderedDict(
+ [
+ (
+ 'Code',
+ DocumentedShape(
+ name='Code',
+ type_name='string',
+ documentation=(
+ 'An identifier specifying the exception type.'
+ ),
+ ),
+ ),
+ (
+ 'Message',
+ DocumentedShape(
+ name='Message',
+ type_name='string',
+ documentation=(
+ 'A descriptive message explaining why the exception '
+ 'occured.'
+ ),
+ ),
+ ),
+ ]
+ ),
+ )
+
+ def __init__(self, client, root_docs_path):
+ self._client = client
+ self._client_class_name = self._client.__class__.__name__
+ self._service_name = self._client.meta.service_model.service_name
+ self._root_docs_path = root_docs_path
+
+ def document_exceptions(self, section):
+ self._add_title(section)
+ self._add_overview(section)
+ self._add_exceptions_list(section)
+ self._add_exception_classes()
+
+ def _add_title(self, section):
+ section.style.h2('Client Exceptions')
+
+ def _add_overview(self, section):
+ section.style.new_line()
+ section.write(
+ 'Client exceptions are available on a client instance '
+ 'via the ``exceptions`` property. For more detailed instructions '
+ 'and examples on the exact usage of client exceptions, see the '
+ 'error handling '
+ )
+ section.style.external_link(
+ title='user guide',
+ link=self._USER_GUIDE_LINK,
+ )
+ section.write('.')
+ section.style.new_line()
+
+ def _exception_class_name(self, shape):
+ return f'{self._client_class_name}.Client.exceptions.{shape.name}'
+
+ def _add_exceptions_list(self, section):
+ error_shapes = self._client.meta.service_model.error_shapes
+ if not error_shapes:
+ section.style.new_line()
+ section.write('This client has no modeled exception classes.')
+ section.style.new_line()
+ return
+ section.style.new_line()
+ section.writeln('The available client exceptions are:')
+ section.style.toctree()
+ for shape in error_shapes:
+ section.style.tocitem(
+ f'{self._service_name}/client/exceptions/{shape.name}'
+ )
+
+ def _add_exception_classes(self):
+ for shape in self._client.meta.service_model.error_shapes:
+ # Create a new DocumentStructure for each exception method and add contents.
+ exception_doc_structure = DocumentStructure(
+ shape.name, target='html'
+ )
+ self._add_exception_class(exception_doc_structure, shape)
+ # Write exceptions in individual/nested files.
+            # Path: <root>/reference/services/<service>/client/exceptions/<exception_name>.rst
+ exception_dir_path = os.path.join(
+ self._root_docs_path,
+ self._service_name,
+ 'client',
+ 'exceptions',
+ )
+ exception_doc_structure.write_to_file(
+ exception_dir_path, shape.name
+ )
+
+ def _add_exception_class(self, section, shape):
+ breadcrumb_section = section.add_new_section('breadcrumb')
+ breadcrumb_section.style.ref(
+ self._client_class_name, f'../../../{self._service_name}'
+ )
+ breadcrumb_section.write(f' / Client / exceptions / {shape.name}')
+ section.add_title_section(shape.name)
+ class_section = section.add_new_section(shape.name)
+ class_name = self._exception_class_name(shape)
+ class_section.style.start_sphinx_py_class(class_name=class_name)
+ self._add_top_level_documentation(class_section, shape)
+ self._add_exception_catch_example(class_section, shape)
+ self._add_response_attr(class_section, shape)
+ class_section.style.end_sphinx_py_class()
+
+ def _add_top_level_documentation(self, section, shape):
+ if shape.documentation:
+ section.style.new_line()
+ section.include_doc_string(shape.documentation)
+ section.style.new_line()
+
+ def _add_exception_catch_example(self, section, shape):
+ section.style.new_line()
+ section.style.bold('Example')
+ section.style.new_paragraph()
+ section.style.start_codeblock()
+ section.write('try:')
+ section.style.indent()
+ section.style.new_line()
+ section.write('...')
+ section.style.dedent()
+ section.style.new_line()
+ section.write(f'except client.exceptions.{shape.name} as e:')
+ section.style.indent()
+ section.style.new_line()
+ section.write('print(e.response)')
+ section.style.dedent()
+ section.style.end_codeblock()
+
+ def _add_response_attr(self, section, shape):
+ response_section = section.add_new_section('response')
+ response_section.style.start_sphinx_py_attr('response')
+ self._add_response_attr_description(response_section)
+ self._add_response_example(response_section, shape)
+ self._add_response_params(response_section, shape)
+ response_section.style.end_sphinx_py_attr()
+
+ def _add_response_attr_description(self, section):
+ section.style.new_line()
+ section.include_doc_string(
+ 'The parsed error response. All exceptions have a top level '
+ '``Error`` key that provides normalized access to common '
+ 'exception atrributes. All other keys are specific to this '
+ 'service or exception class.'
+ )
+ section.style.new_line()
+
+ def _add_response_example(self, section, shape):
+ example_section = section.add_new_section('syntax')
+ example_section.style.new_line()
+ example_section.style.bold('Syntax')
+ example_section.style.new_paragraph()
+ documenter = ResponseExampleDocumenter(
+ service_name=self._service_name,
+ operation_name=None,
+ event_emitter=self._client.meta.events,
+ )
+ documenter.document_example(
+ example_section,
+ shape,
+ include=[self._GENERIC_ERROR_SHAPE],
+ )
+
+ def _add_response_params(self, section, shape):
+ params_section = section.add_new_section('Structure')
+ params_section.style.new_line()
+ params_section.style.bold('Structure')
+ params_section.style.new_paragraph()
+ documenter = ResponseParamsDocumenter(
+ service_name=self._service_name,
+ operation_name=None,
+ event_emitter=self._client.meta.events,
+ )
+ documenter.document_params(
+ params_section,
+ shape,
+ include=[self._GENERIC_ERROR_SHAPE],
+ )
+
+
+class ClientContextParamsDocumenter:
+ _CONFIG_GUIDE_LINK = (
+ 'https://boto3.amazonaws.com/'
+ 'v1/documentation/api/latest/guide/configuration.html'
+ )
+
+ OMITTED_CONTEXT_PARAMS = {
+ 's3': (
+ 'Accelerate',
+ 'DisableMultiRegionAccessPoints',
+ 'ForcePathStyle',
+ 'UseArnRegion',
+ ),
+ 's3control': ('UseArnRegion',),
+ }
+
+ def __init__(self, service_name, context_params):
+ self._service_name = service_name
+ self._context_params = context_params
+
+ def document_context_params(self, section):
+ self._add_title(section)
+ self._add_overview(section)
+ self._add_context_params_list(section)
+
+ def _add_title(self, section):
+ section.style.h2('Client Context Parameters')
+
+ def _add_overview(self, section):
+ section.style.new_line()
+ section.write(
+ 'Client context parameters are configurable on a client '
+ 'instance via the ``client_context_params`` parameter in the '
+ '``Config`` object. For more detailed instructions and examples '
+ 'on the exact usage of context params see the '
+ )
+ section.style.external_link(
+ title='configuration guide',
+ link=self._CONFIG_GUIDE_LINK,
+ )
+ section.write('.')
+ section.style.new_line()
+
+ def _add_context_params_list(self, section):
+ section.style.new_line()
+ sn = f'``{self._service_name}``'
+ section.writeln(f'The available {sn} client context params are:')
+ for param in self._context_params:
+ section.style.new_line()
+ name = f'``{xform_name(param.name)}``'
+ section.write(f'* {name} ({param.type}) - {param.documentation}')
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/docstring.py b/py311/lib/python3.11/site-packages/botocore/docs/docstring.py
new file mode 100644
index 0000000000000000000000000000000000000000..93b2e6b23cc5b295fe23a151ae3a0ffe797f0db6
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/docstring.py
@@ -0,0 +1,97 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+from botocore.docs.method import document_model_driven_method
+from botocore.docs.paginator import document_paginate_method
+from botocore.docs.waiter import document_wait_method
+
+
+class LazyLoadedDocstring(str):
+ """Used for lazily loading docstrings
+
+ You can instantiate this class and assign it to a __doc__ value.
+ The docstring will not be generated till accessed via __doc__ or
+ help(). Note that all docstring classes **must** subclass from
+ this class. It cannot be used directly as a docstring.
+ """
+
+ def __init__(self, *args, **kwargs):
+ """
+ The args and kwargs are the same as the underlying document
+ generation function. These just get proxied to the underlying
+ function.
+ """
+ super().__init__()
+ self._gen_args = args
+ self._gen_kwargs = kwargs
+ self._docstring = None
+
+ def __new__(cls, *args, **kwargs):
+ # Needed in order to sub class from str with args and kwargs
+ return super().__new__(cls)
+
+ def _write_docstring(self, *args, **kwargs):
+ raise NotImplementedError(
+ '_write_docstring is not implemented. Please subclass from '
+ 'this class and provide your own _write_docstring method'
+ )
+
+ def expandtabs(self, tabsize=8):
+ """Expands tabs to spaces
+
+        So this is a big hack in order to make lazily loaded docstrings work
+ for the ``help()``. In the ``help()`` function, ``pydoc`` and
+ ``inspect`` are used. At some point the ``inspect.cleandoc``
+ method is called. To clean the docs ``expandtabs`` is called
+ and that is where we override the method to generate and return the
+ docstrings.
+ """
+ if self._docstring is None:
+ self._generate()
+ return self._docstring.expandtabs(tabsize)
+
+ def __str__(self):
+ return self._generate()
+
+ # __doc__ of target will use either __repr__ or __str__ of this class.
+ __repr__ = __str__
+
+ def _generate(self):
+ # Generate the docstring if it is not already cached.
+ if self._docstring is None:
+ self._docstring = self._create_docstring()
+ return self._docstring
+
+ def _create_docstring(self):
+ docstring_structure = DocumentStructure('docstring', target='html')
+ # Call the document method function with the args and kwargs
+ # passed to the class.
+ self._write_docstring(
+ docstring_structure, *self._gen_args, **self._gen_kwargs
+ )
+ return docstring_structure.flush_structure().decode('utf-8')
+
+
+class ClientMethodDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_model_driven_method(*args, **kwargs)
+
+
+class WaiterDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_wait_method(*args, **kwargs)
+
+
+class PaginatorDocstring(LazyLoadedDocstring):
+ def _write_docstring(self, *args, **kwargs):
+ document_paginate_method(*args, **kwargs)
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/example.py b/py311/lib/python3.11/site-packages/botocore/docs/example.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb43db550962030728a69d3b47ae50d40c15fa35
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/example.py
@@ -0,0 +1,236 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.shape import ShapeDocumenter
+from botocore.docs.utils import py_default
+
+
+class BaseExampleDocumenter(ShapeDocumenter):
+ def document_example(
+ self, section, shape, prefix=None, include=None, exclude=None
+ ):
+ """Generates an example based on a shape
+
+ :param section: The section to write the documentation to.
+
+ :param shape: The shape of the operation.
+
+ :param prefix: Anything to be included before the example
+
+ :type include: Dictionary where keys are parameter names and
+ values are the shapes of the parameter names.
+ :param include: The parameter shapes to include in the documentation.
+
+ :type exclude: List of the names of the parameters to exclude.
+ :param exclude: The names of the parameters to exclude from
+ documentation.
+ """
+ history = []
+ section.style.new_line()
+ section.style.start_codeblock()
+ if prefix is not None:
+ section.write(prefix)
+ self.traverse_and_document_shape(
+ section=section,
+ shape=shape,
+ history=history,
+ include=include,
+ exclude=exclude,
+ )
+ final_blank_line_section = section.add_new_section('final-blank-line')
+ final_blank_line_section.style.new_line()
+
+ def document_recursive_shape(self, section, shape, **kwargs):
+ section.write('{\'... recursive ...\'}')
+
+ def document_shape_default(
+ self, section, shape, history, include=None, exclude=None, **kwargs
+ ):
+ py_type = self._get_special_py_default(shape)
+ if py_type is None:
+ py_type = py_default(shape.type_name)
+
+ if self._context.get('streaming_shape') == shape:
+ py_type = 'StreamingBody()'
+ section.write(py_type)
+
+ def document_shape_type_string(
+ self, section, shape, history, include=None, exclude=None, **kwargs
+ ):
+ if 'enum' in shape.metadata:
+ for i, enum in enumerate(shape.metadata['enum']):
+ section.write(f'\'{enum}\'')
+ if i < len(shape.metadata['enum']) - 1:
+ section.write('|')
+ else:
+ self.document_shape_default(section, shape, history)
+
+ def document_shape_type_list(
+ self, section, shape, history, include=None, exclude=None, **kwargs
+ ):
+ param_shape = shape.member
+ list_section = section.add_new_section('list-value')
+ self._start_nested_param(list_section, '[')
+ param_section = list_section.add_new_section(
+ 'member', context={'shape': param_shape.name}
+ )
+ self.traverse_and_document_shape(
+ section=param_section, shape=param_shape, history=history
+ )
+ ending_comma_section = list_section.add_new_section('ending-comma')
+ ending_comma_section.write(',')
+ ending_bracket_section = list_section.add_new_section('ending-bracket')
+ self._end_nested_param(ending_bracket_section, ']')
+
+ def document_shape_type_structure(
+ self, section, shape, history, include=None, exclude=None, **kwargs
+ ):
+ if not shape.members:
+ section.write('{}')
+ return
+
+ section = section.add_new_section('structure-value')
+ self._start_nested_param(section, '{')
+
+ input_members = self._add_members_to_shape(shape.members, include)
+
+ for i, param in enumerate(input_members):
+ if exclude and param in exclude:
+ continue
+ param_section = section.add_new_section(param)
+ param_section.write(f'\'{param}\': ')
+ param_shape = input_members[param]
+ param_value_section = param_section.add_new_section(
+ 'member-value', context={'shape': param_shape.name}
+ )
+ self.traverse_and_document_shape(
+ section=param_value_section,
+ shape=param_shape,
+ history=history,
+ name=param,
+ )
+ if i < len(input_members) - 1:
+ ending_comma_section = param_section.add_new_section(
+ 'ending-comma'
+ )
+ ending_comma_section.write(',')
+ ending_comma_section.style.new_line()
+ self._end_structure(section, '{', '}')
+
+ def document_shape_type_map(
+ self, section, shape, history, include=None, exclude=None, **kwargs
+ ):
+ map_section = section.add_new_section('map-value')
+ self._start_nested_param(map_section, '{')
+ value_shape = shape.value
+ key_section = map_section.add_new_section(
+ 'key', context={'shape': shape.key.name}
+ )
+ key_section.write('\'string\': ')
+ value_section = map_section.add_new_section(
+ 'value', context={'shape': value_shape.name}
+ )
+ self.traverse_and_document_shape(
+ section=value_section, shape=value_shape, history=history
+ )
+ end_bracket_section = map_section.add_new_section('ending-bracket')
+ self._end_nested_param(end_bracket_section, '}')
+
+ def _add_members_to_shape(self, members, include):
+ if include:
+ members = members.copy()
+ for param in include:
+ members[param.name] = param
+ return members
+
+ def _start_nested_param(self, section, start=None):
+ if start is not None:
+ section.write(start)
+ section.style.indent()
+ section.style.indent()
+ section.style.new_line()
+
+ def _end_nested_param(self, section, end=None):
+ section.style.dedent()
+ section.style.dedent()
+ section.style.new_line()
+ if end is not None:
+ section.write(end)
+
+ def _end_structure(self, section, start, end):
+        # If there are no members in the structure, then make sure the
+ # start and the end bracket are on the same line, by removing all
+ # previous text and writing the start and end.
+ if not section.available_sections:
+ section.clear_text()
+ section.write(start + end)
+ self._end_nested_param(section)
+ else:
+ end_bracket_section = section.add_new_section('ending-bracket')
+ self._end_nested_param(end_bracket_section, end)
+
+
+class ResponseExampleDocumenter(BaseExampleDocumenter):
+ EVENT_NAME = 'response-example'
+
+ def document_shape_type_event_stream(
+ self, section, shape, history, **kwargs
+ ):
+ section.write('EventStream(')
+ self.document_shape_type_structure(section, shape, history, **kwargs)
+ end_section = section.add_new_section('event-stream-end')
+ end_section.write(')')
+
+
+class RequestExampleDocumenter(BaseExampleDocumenter):
+ EVENT_NAME = 'request-example'
+
+ def document_shape_type_structure(
+ self, section, shape, history, include=None, exclude=None, **kwargs
+ ):
+ param_format = '\'%s\''
+ operator = ': '
+ start = '{'
+ end = '}'
+
+ if len(history) <= 1:
+ operator = '='
+ start = '('
+ end = ')'
+ param_format = '%s'
+ section = section.add_new_section('structure-value')
+ self._start_nested_param(section, start)
+ input_members = self._add_members_to_shape(shape.members, include)
+
+ for i, param in enumerate(input_members):
+ if exclude and param in exclude:
+ continue
+ param_section = section.add_new_section(param)
+ param_section.write(param_format % param)
+ param_section.write(operator)
+ param_shape = input_members[param]
+ param_value_section = param_section.add_new_section(
+ 'member-value', context={'shape': param_shape.name}
+ )
+ self.traverse_and_document_shape(
+ section=param_value_section,
+ shape=param_shape,
+ history=history,
+ name=param,
+ )
+ if i < len(input_members) - 1:
+ ending_comma_section = param_section.add_new_section(
+ 'ending-comma'
+ )
+ ending_comma_section.write(',')
+ ending_comma_section.style.new_line()
+ self._end_structure(section, start, end)
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/method.py b/py311/lib/python3.11/site-packages/botocore/docs/method.py
new file mode 100644
index 0000000000000000000000000000000000000000..5db906c8ddd5c1278620bcf7479d78b6cf6ea903
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/method.py
@@ -0,0 +1,328 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import inspect
+import types
+
+from botocore.docs.example import (
+ RequestExampleDocumenter,
+ ResponseExampleDocumenter,
+)
+from botocore.docs.params import (
+ RequestParamsDocumenter,
+ ResponseParamsDocumenter,
+)
+
+AWS_DOC_BASE = 'https://docs.aws.amazon.com/goto/WebAPI'
+
+
def get_instance_public_methods(instance):
    """Retrieves an object's public methods

    :param instance: The instance of the class to inspect
    :rtype: dict
    :returns: A dictionary mapping each public (no leading underscore)
        method name of ``instance`` to the bound method itself.
    """
    return {
        name: member
        for name, member in inspect.getmembers(instance)
        if not name.startswith('_') and inspect.ismethod(member)
    }
+
+
def document_model_driven_signature(
    section, name, operation_model, include=None, exclude=None
):
    """Documents the signature of a model-driven method

    :param section: The section to write the documentation to.

    :param name: The name of the method

    :param operation_model: The operation model for the method

    :type include: Dictionary where keys are parameter names and
        values are the shapes of the parameter names.
    :param include: The parameter shapes to include in the documentation.

    :type exclude: List of the names of the parameters to exclude.
    :param exclude: The names of the parameters to exclude from
        documentation.
    """
    input_shape = operation_model.input_shape
    parameter_names = list(input_shape.members) if input_shape else []

    if include is not None:
        parameter_names.extend(member.name for member in include)

    if exclude is not None:
        for member in exclude:
            if member in parameter_names:
                parameter_names.remove(member)

    # Model-driven methods only ever accept keyword arguments, so the
    # rendered signature is either '**kwargs' or empty.
    signature_params = '**kwargs' if parameter_names else ''
    section.style.start_sphinx_py_method(name, signature_params)
+
+
def document_custom_signature(
    section, name, method, include=None, exclude=None
):
    """Documents the signature of a custom method

    :param section: The section to write the documentation to.

    :param name: The name of the method

    :param method: The handle to the method being documented

    :type include: Dictionary where keys are parameter names and
        values are the shapes of the parameter names.
    :param include: The parameter shapes to include in the documentation.

    :type exclude: List of the names of the parameters to exclude.
    :param exclude: The names of the parameters to exclude from
        documentation.
    """
    signature = inspect.signature(method)
    # "raw" class methods are FunctionType and they include "self" param
    # object methods are MethodType and they skip the "self" param
    if isinstance(method, types.FunctionType):
        self_param = next(iter(signature.parameters))
        self_kind = signature.parameters[self_param].kind
        # safety check that we got the right parameter
        assert self_kind == inspect.Parameter.POSITIONAL_OR_KEYWORD
        new_params = signature.parameters.copy()
        del new_params[self_param]
        signature = signature.replace(parameters=new_params.values())
    signature_str = str(signature)
    # str(Signature) has the form '(params...)': strip exactly one pair of
    # enclosing parentheses.  The previous lstrip('(')/rstrip(')') approach
    # also removed parentheses belonging to a default value, e.g.
    # 'beta=(1, 2)' lost its trailing parenthesis.
    if signature_str.startswith('(') and signature_str.endswith(')'):
        signature_params = signature_str[1:-1]
    else:
        # e.g. a return annotation produces '(...) -> int'; keep the prior
        # best-effort behavior for that shape.
        signature_params = signature_str.lstrip('(').rstrip(')')
    section.style.start_sphinx_py_method(name, signature_params)
+
+
def document_custom_method(section, method_name, method):
    """Documents a non-data driven method

    :param section: The section to write the documentation to.

    :param method_name: The name of the method

    :param method: The handle to the method being documented
    """
    # An optional qualifier (e.g. a class prefix) may be carried in the
    # section context.
    qualifier = section.context.get('qualifier', '')
    document_custom_signature(section, f'{qualifier}{method_name}', method)
    intro_section = section.add_new_section('method-intro')
    intro_section.writeln('')
    doc_string = inspect.getdoc(method)
    if doc_string is not None:
        intro_section.style.write_py_doc_string(doc_string)
+
+
def document_model_driven_method(
    section,
    method_name,
    operation_model,
    event_emitter,
    method_description=None,
    example_prefix=None,
    include_input=None,
    include_output=None,
    exclude_input=None,
    exclude_output=None,
    document_output=True,
    include_signature=True,
):
    """Documents an individual method

    :param section: The section to write to

    :param method_name: The name of the method

    :param operation_model: The model of the operation

    :param event_emitter: The event emitter to use to emit events

    :param method_description: The description to use for the method.

    :param example_prefix: The prefix to use in the method example.

    :type include_input: Dictionary where keys are parameter names and
        values are the shapes of the parameter names.
    :param include_input: The parameter shapes to include in the
        input documentation.

    :type include_output: Dictionary where keys are parameter names and
        values are the shapes of the parameter names.
    :param include_output: The parameter shapes to include in the
        output documentation.

    :type exclude_input: List of the names of the parameters to exclude.
    :param exclude_input: The names of the parameters to exclude from
        input documentation.

    :type exclude_output: List of the names of the parameters to exclude.
    :param exclude_output: The names of the parameters to exclude from
        output documentation.

    :param document_output: A boolean flag to indicate whether to
        document the output.

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    # Add the signature if specified.
    if include_signature:
        document_model_driven_signature(
            section,
            method_name,
            operation_model,
            include=include_input,
            exclude=exclude_input,
        )

    # Add the description for the method.
    method_intro_section = section.add_new_section('method-intro')
    method_intro_section.include_doc_string(method_description)
    if operation_model.deprecated:
        # NOTE(review): 'compatiblity' typo below is in the emitted text;
        # fix upstream rather than here to keep output byte-stable.
        method_intro_section.style.start_danger()
        method_intro_section.writeln(
            'This operation is deprecated and may not function as '
            'expected. This operation should not be used going forward '
            'and is only kept for the purpose of backwards compatiblity.'
        )
        method_intro_section.style.end_danger()
    # Link to the official AWS API reference when the service has a uid.
    service_uid = operation_model.service_model.metadata.get('uid')
    if service_uid is not None:
        method_intro_section.style.new_paragraph()
        method_intro_section.write("See also: ")
        link = f"{AWS_DOC_BASE}/{service_uid}/{operation_model.name}"
        method_intro_section.style.external_link(
            title="AWS API Documentation", link=link
        )
        method_intro_section.writeln('')

    # Add the example section.
    example_section = section.add_new_section('request-example')
    example_section.style.new_paragraph()
    example_section.style.bold('Request Syntax')

    # Special shape types are threaded through to the shape documenters so
    # streaming/eventstream members can be rendered differently.
    context = {
        'special_shape_types': {
            'streaming_input_shape': operation_model.get_streaming_input(),
            'streaming_output_shape': operation_model.get_streaming_output(),
            'eventstream_output_shape': operation_model.get_event_stream_output(),
        },
    }

    if operation_model.input_shape:
        RequestExampleDocumenter(
            service_name=operation_model.service_model.service_name,
            operation_name=operation_model.name,
            event_emitter=event_emitter,
            context=context,
        ).document_example(
            example_section,
            operation_model.input_shape,
            prefix=example_prefix,
            include=include_input,
            exclude=exclude_input,
        )
    else:
        # No input: show a bare call.
        # NOTE(review): assumes example_prefix is provided whenever there is
        # no input shape, otherwise this concatenation raises — confirm callers.
        example_section.style.new_paragraph()
        example_section.style.start_codeblock()
        example_section.write(example_prefix + '()')

    # Add the request parameter documentation.
    request_params_section = section.add_new_section('request-params')
    if operation_model.input_shape:
        RequestParamsDocumenter(
            service_name=operation_model.service_model.service_name,
            operation_name=operation_model.name,
            event_emitter=event_emitter,
            context=context,
        ).document_params(
            request_params_section,
            operation_model.input_shape,
            include=include_input,
            exclude=exclude_input,
        )

    # Add the return value documentation
    return_section = section.add_new_section('return')
    return_section.style.new_line()
    if operation_model.output_shape is not None and document_output:
        return_section.write(':rtype: dict')
        return_section.style.new_line()
        return_section.write(':returns: ')
        return_section.style.indent()
        return_section.style.new_line()

        # If the operation is an event stream, describe the tagged union
        event_stream_output = operation_model.get_event_stream_output()
        if event_stream_output:
            event_section = return_section.add_new_section('event-stream')
            event_section.style.new_paragraph()
            event_section.write(
                'The response of this operation contains an '
                ':class:`.EventStream` member. When iterated the '
                ':class:`.EventStream` will yield events based on the '
                'structure below, where only one of the top level keys '
                'will be present for any given event.'
            )
            event_section.style.new_line()

        # Add an example return value
        return_example_section = return_section.add_new_section(
            'response-example'
        )
        return_example_section.style.new_line()
        return_example_section.style.bold('Response Syntax')
        return_example_section.style.new_paragraph()
        ResponseExampleDocumenter(
            service_name=operation_model.service_model.service_name,
            operation_name=operation_model.name,
            event_emitter=event_emitter,
            context=context,
        ).document_example(
            return_example_section,
            operation_model.output_shape,
            include=include_output,
            exclude=exclude_output,
        )

        # Add a description for the return value
        return_description_section = return_section.add_new_section(
            'description'
        )
        return_description_section.style.new_line()
        return_description_section.style.bold('Response Structure')
        return_description_section.style.new_paragraph()
        ResponseParamsDocumenter(
            service_name=operation_model.service_model.service_name,
            operation_name=operation_model.name,
            event_emitter=event_emitter,
            context=context,
        ).document_params(
            return_description_section,
            operation_model.output_shape,
            include=include_output,
            exclude=exclude_output,
        )
    else:
        return_section.write(':returns: None')
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/paginator.py b/py311/lib/python3.11/site-packages/botocore/docs/paginator.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c9b30034f8c4feb64a8e01de86f476488edac63
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/paginator.py
@@ -0,0 +1,241 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+from botocore import xform_name
+from botocore.compat import OrderedDict
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+from botocore.docs.method import document_model_driven_method
+from botocore.docs.utils import DocumentedShape
+from botocore.utils import get_service_module_name
+
+
class PaginatorDocumenter:
    """Generates reST documentation for all paginators of a service."""

    def __init__(self, client, service_paginator_model, root_docs_path):
        """
        :param client: The service client whose paginators are documented.
        :param service_paginator_model: The paginator model for the service.
        :param root_docs_path: Root directory the per-paginator ``.rst``
            files are written under.
        """
        self._client = client
        self._client_class_name = self._client.__class__.__name__
        self._service_name = self._client.meta.service_model.service_name
        self._service_paginator_model = service_paginator_model
        self._root_docs_path = root_docs_path
        self._USER_GUIDE_LINK = (
            'https://boto3.amazonaws.com/'
            'v1/documentation/api/latest/guide/paginators.html'
        )

    def document_paginators(self, section):
        """Documents the various paginators for a service

        :param section: The section to write to.
        """
        section.style.h2('Paginators')
        self._add_overview(section)
        section.style.new_line()
        section.writeln('The available paginators are:')
        section.style.toctree()

        # NOTE(review): reaches into the model's private _paginator_config;
        # sorted purely for a deterministic listing order.
        paginator_names = sorted(
            self._service_paginator_model._paginator_config
        )

        # List the available paginators and then document each paginator.
        for paginator_name in paginator_names:
            section.style.tocitem(
                f'{self._service_name}/paginator/{paginator_name}'
            )
            # Create a new DocumentStructure for each paginator and add contents.
            paginator_doc_structure = DocumentStructure(
                paginator_name, target='html'
            )
            self._add_paginator(paginator_doc_structure, paginator_name)
            # Write paginators in individual/nested files.
            # Path: <root>/<service>/paginator/<paginator_name>.rst
            paginator_dir_path = os.path.join(
                self._root_docs_path, self._service_name, 'paginator'
            )
            paginator_doc_structure.write_to_file(
                paginator_dir_path, paginator_name
            )

    def _add_paginator(self, section, paginator_name):
        """Documents a single paginator: breadcrumb, title, class and
        its ``paginate`` method.

        :param section: The DocumentStructure to fill in.
        :param paginator_name: The (PascalCase) name of the paginator.
        """
        # Breadcrumb linking back to the client page.
        breadcrumb_section = section.add_new_section('breadcrumb')
        breadcrumb_section.style.ref(
            self._client_class_name, f'../../{self._service_name}'
        )
        breadcrumb_section.write(f' / Paginator / {paginator_name}')
        section.add_title_section(paginator_name)

        # Document the paginator class
        paginator_section = section.add_new_section(paginator_name)
        paginator_section.style.start_sphinx_py_class(
            class_name=(
                f'{self._client_class_name}.Paginator.{paginator_name}'
            )
        )
        paginator_section.style.start_codeblock()
        paginator_section.style.new_line()

        # Document how to instantiate the paginator.
        paginator_section.write(
            f"paginator = client.get_paginator('{xform_name(paginator_name)}')"
        )
        paginator_section.style.end_codeblock()
        paginator_section.style.new_line()
        # Get the pagination model for the particular paginator.
        paginator_config = self._service_paginator_model.get_paginator(
            paginator_name
        )
        document_paginate_method(
            section=paginator_section,
            paginator_name=paginator_name,
            event_emitter=self._client.meta.events,
            service_model=self._client.meta.service_model,
            paginator_config=paginator_config,
        )

    def _add_overview(self, section):
        # Short intro paragraph with a link to the boto3 paginators guide.
        section.style.new_line()
        section.write(
            'Paginators are available on a client instance '
            'via the ``get_paginator`` method. For more detailed instructions '
            'and examples on the usage of paginators, see the '
            'paginators '
        )
        section.style.external_link(
            title='user guide',
            link=self._USER_GUIDE_LINK,
        )
        section.write('.')
        section.style.new_line()
+
+
def document_paginate_method(
    section,
    paginator_name,
    event_emitter,
    service_model,
    paginator_config,
    include_signature=True,
):
    """Documents the paginate method of a paginator

    :param section: The section to write to

    :param paginator_name: The name of the paginator. It is snake cased.

    :param event_emitter: The event emitter to use to emit events

    :param service_model: The service model

    :param paginator_config: The paginator config associated to a particular
        paginator.

    :param include_signature: Whether or not to include the signature.
        It is useful for generating docstrings.
    """
    # Retrieve the operation model of the underlying operation.
    operation_model = service_model.operation_model(paginator_name)

    # Add representations of the request and response parameters
    # we want to include in the description of the paginate method.
    # These are parameters we expose via the botocore interface.
    pagination_config_members = OrderedDict()

    pagination_config_members['MaxItems'] = DocumentedShape(
        name='MaxItems',
        type_name='integer',
        documentation=(
            '<p>The total number of items to return. If the total '
            'number of items available is more than the value '
            'specified in max-items then a <code>NextToken</code> '
            'will be provided in the output that you can use to '
            'resume pagination.</p>'
        ),
    )

    # Only paginators with a limit_key support controlling the page size.
    if paginator_config.get('limit_key', None):
        pagination_config_members['PageSize'] = DocumentedShape(
            name='PageSize',
            type_name='integer',
            documentation='<p>The size of each page.</p>',
        )

    pagination_config_members['StartingToken'] = DocumentedShape(
        name='StartingToken',
        type_name='string',
        documentation=(
            '<p>A token to specify where to start paginating. '
            'This is the <code>NextToken</code> from a previous '
            'response.</p>'
        ),
    )

    # The single synthetic input parameter exposed by botocore pagination.
    botocore_pagination_params = [
        DocumentedShape(
            name='PaginationConfig',
            type_name='structure',
            documentation=(
                '<p>A dictionary that provides parameters to control '
                'pagination.</p>'
            ),
            members=pagination_config_members,
        )
    ]

    # The synthetic response parameter exposed by botocore pagination.
    botocore_pagination_response_params = [
        DocumentedShape(
            name='NextToken',
            type_name='string',
            documentation=('<p>A token to resume pagination.</p>'),
        )
    ]

    service_pagination_params = []

    # Add the normal input token of the method to a list
    # of input paramters that we wish to hide since we expose our own.
    if isinstance(paginator_config['input_token'], list):
        service_pagination_params += paginator_config['input_token']
    else:
        service_pagination_params.append(paginator_config['input_token'])

    # Hide the limit key in the documentation.
    if paginator_config.get('limit_key', None):
        service_pagination_params.append(paginator_config['limit_key'])

    # Hide the output tokens in the documentation.
    service_pagination_response_params = []
    if isinstance(paginator_config['output_token'], list):
        service_pagination_response_params += paginator_config['output_token']
    else:
        service_pagination_response_params.append(
            paginator_config['output_token']
        )

    paginate_description = (
        'Creates an iterator that will paginate through responses '
        f'from :py:meth:`{get_service_module_name(service_model)}.Client.{xform_name(paginator_name)}`.'
    )

    document_model_driven_method(
        section,
        'paginate',
        operation_model,
        event_emitter=event_emitter,
        method_description=paginate_description,
        example_prefix='response_iterator = paginator.paginate',
        include_input=botocore_pagination_params,
        include_output=botocore_pagination_response_params,
        exclude_input=service_pagination_params,
        exclude_output=service_pagination_response_params,
        include_signature=include_signature,
    )
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/params.py b/py311/lib/python3.11/site-packages/botocore/docs/params.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e0f398b09f9552d4053edab2335c7e7c96bb1be
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/params.py
@@ -0,0 +1,302 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.shape import ShapeDocumenter
+from botocore.docs.utils import py_type_name
+
+
class BaseParamsDocumenter(ShapeDocumenter):
    """Shared traversal logic for documenting request/response parameters."""

    def document_params(self, section, shape, include=None, exclude=None):
        """Fills out the documentation for a section given a model shape.

        :param section: The section to write the documentation to.

        :param shape: The shape of the operation.

        :type include: Dictionary where keys are parameter names and
            values are the shapes of the parameter names.
        :param include: The parameter shapes to include in the documentation.

        :type exclude: List of the names of the parameters to exclude.
        :param exclude: The names of the parameters to exclude from
            documentation.
        """
        history = []
        self.traverse_and_document_shape(
            section=section,
            shape=shape,
            history=history,
            name=None,
            include=include,
            exclude=exclude,
        )

    def document_recursive_shape(self, section, shape, **kwargs):
        # A shape already seen during traversal: only emit its member line —
        # presumably to stop infinite recursion; see
        # traverse_and_document_shape's history parameter.
        self._add_member_documentation(section, shape, **kwargs)

    def document_shape_default(
        self, section, shape, history, include=None, exclude=None, **kwargs
    ):
        # Scalar / otherwise-unhandled types: just document the member.
        self._add_member_documentation(section, shape, **kwargs)

    def document_shape_type_list(
        self, section, shape, history, include=None, exclude=None, **kwargs
    ):
        # Document the list itself, then its element shape one indent deeper.
        self._add_member_documentation(section, shape, **kwargs)
        param_shape = shape.member
        param_section = section.add_new_section(
            param_shape.name, context={'shape': shape.member.name}
        )
        self._start_nested_param(param_section)
        self.traverse_and_document_shape(
            section=param_section,
            shape=param_shape,
            history=history,
            name=None,
        )
        section = section.add_new_section('end-list')
        self._end_nested_param(section)

    def document_shape_type_map(
        self, section, shape, history, include=None, exclude=None, **kwargs
    ):
        # Document the map, then its key shape and value shape, each nested.
        self._add_member_documentation(section, shape, **kwargs)

        key_section = section.add_new_section(
            'key', context={'shape': shape.key.name}
        )
        self._start_nested_param(key_section)
        self._add_member_documentation(key_section, shape.key)

        param_section = section.add_new_section(
            shape.value.name, context={'shape': shape.value.name}
        )
        # Extra indent so the value documentation nests under the key.
        param_section.style.indent()
        self._start_nested_param(param_section)
        self.traverse_and_document_shape(
            section=param_section,
            shape=shape.value,
            history=history,
            name=None,
        )

        end_section = section.add_new_section('end-map')
        # Dedent twice: once for the key nesting, once for the extra
        # value indent added above.
        self._end_nested_param(end_section)
        self._end_nested_param(end_section)

    def document_shape_type_structure(
        self,
        section,
        shape,
        history,
        include=None,
        exclude=None,
        name=None,
        **kwargs,
    ):
        # Document the structure, then each non-excluded member one level
        # deeper; ``include`` may add synthetic members.
        members = self._add_members_to_shape(shape.members, include)
        self._add_member_documentation(section, shape, name=name)
        for param in members:
            if exclude and param in exclude:
                continue
            param_shape = members[param]
            param_section = section.add_new_section(
                param, context={'shape': param_shape.name}
            )
            self._start_nested_param(param_section)
            self.traverse_and_document_shape(
                section=param_section,
                shape=param_shape,
                history=history,
                name=param,
            )
        section = section.add_new_section('end-structure')
        self._end_nested_param(section)

    def _add_member_documentation(self, section, shape, **kwargs):
        # Overridden by subclasses; the base documenter emits nothing.
        pass

    def _add_members_to_shape(self, members, include):
        # Merge ``include`` shapes into a copy of the model's members so the
        # original mapping is never mutated.
        if include:
            members = members.copy()
            for param in include:
                members[param.name] = param
        return members

    def _document_non_top_level_param_type(self, type_section, shape):
        # Emit '(<type>) --' for a nested member.
        special_py_type = self._get_special_py_type_name(shape)
        py_type = py_type_name(shape.type_name)

        type_format = '(%s) --'
        if special_py_type is not None:
            # Special type can reference a linked class.
            # Italicizing it blows away the link.
            type_section.write(type_format % special_py_type)
        else:
            type_section.style.italics(type_format % py_type)
        type_section.write(' ')

    def _start_nested_param(self, section):
        # Indent one level and start a fresh line for nested content.
        section.style.indent()
        section.style.new_line()

    def _end_nested_param(self, section):
        # Undo one level of nesting.
        section.style.dedent()
        section.style.new_line()
+
+
class ResponseParamsDocumenter(BaseParamsDocumenter):
    """Generates the description for the response parameters"""

    EVENT_NAME = 'response-params'

    def _add_member_documentation(self, section, shape, name=None, **kwargs):
        # Emit one '- **name** (*type*) -- documentation' entry for a member.
        name_section = section.add_new_section('param-name')
        name_section.write('- ')
        if name is not None:
            name_section.style.bold(f'{name}')
            name_section.write(' ')
        type_section = section.add_new_section('param-type')
        self._document_non_top_level_param_type(type_section, shape)

        documentation_section = section.add_new_section('param-documentation')
        if shape.documentation:
            documentation_section.style.indent()
            # Tagged unions get an explanatory note plus an example of the
            # SDK_UNKNOWN_MEMBER fallback structure.
            if getattr(shape, 'is_tagged_union', False):
                tagged_union_docs = section.add_new_section(
                    'param-tagged-union-docs'
                )
                note = (
                    '.. note::'
                    ' This is a Tagged Union structure. Only one of the '
                    ' following top level keys will be set: %s. '
                    ' If a client receives an unknown member it will '
                    ' set ``SDK_UNKNOWN_MEMBER`` as the top level key, '
                    ' which maps to the name or tag of the unknown '
                    ' member. The structure of ``SDK_UNKNOWN_MEMBER`` is '
                    ' as follows'
                )
                tagged_union_members_str = ', '.join(
                    [f'``{key}``' for key in shape.members.keys()]
                )
                unknown_code_example = (
                    '\'SDK_UNKNOWN_MEMBER\': {\'name\': \'UnknownMemberName\'}'
                )
                tagged_union_docs.write(note % (tagged_union_members_str))
                example = section.add_new_section('param-unknown-example')
                example.style.codeblock(unknown_code_example)
            documentation_section.include_doc_string(shape.documentation)
        section.style.new_paragraph()

    def document_shape_type_event_stream(
        self, section, shape, history, **kwargs
    ):
        # Event streams are documented like plain structures in the
        # response parameter listing.
        self.document_shape_type_structure(section, shape, history, **kwargs)
+
+
class RequestParamsDocumenter(BaseParamsDocumenter):
    """Generates the description for the request parameters"""

    EVENT_NAME = 'request-params'

    def document_shape_type_structure(
        self, section, shape, history, include=None, exclude=None, **kwargs
    ):
        # Top-level structures (len(history) <= 1) are rendered flat as
        # ':type'/':param' fields; nested structures get their own member
        # line and an extra indent.
        if len(history) > 1:
            self._add_member_documentation(section, shape, **kwargs)
            section.style.indent()
        members = self._add_members_to_shape(shape.members, include)
        for i, param in enumerate(members):
            if exclude and param in exclude:
                continue
            param_shape = members[param]
            param_section = section.add_new_section(
                param, context={'shape': param_shape.name}
            )
            param_section.style.new_line()
            is_required = param in shape.required_members
            self.traverse_and_document_shape(
                section=param_section,
                shape=param_shape,
                history=history,
                name=param,
                is_required=is_required,
            )
        section = section.add_new_section('end-structure')
        if len(history) > 1:
            # Close the extra indent opened for nested structures.
            section.style.dedent()
        section.style.new_line()

    def _add_member_documentation(
        self,
        section,
        shape,
        name=None,
        is_top_level_param=False,
        is_required=False,
        **kwargs,
    ):
        # Top-level parameters use ':type name:'/':param name:' fields;
        # nested ones use the '- **name** (*type*) --' list style.
        py_type = self._get_special_py_type_name(shape)
        if py_type is None:
            py_type = py_type_name(shape.type_name)
        if is_top_level_param:
            type_section = section.add_new_section('param-type')
            type_section.write(f':type {name}: {py_type}')
            end_type_section = type_section.add_new_section('end-param-type')
            end_type_section.style.new_line()
            name_section = section.add_new_section('param-name')
            name_section.write(f':param {name}: ')

        else:
            name_section = section.add_new_section('param-name')
            name_section.write('- ')
            if name is not None:
                name_section.style.bold(f'{name}')
                name_section.write(' ')
            type_section = section.add_new_section('param-type')
            self._document_non_top_level_param_type(type_section, shape)

        if is_required:
            is_required_section = section.add_new_section('is-required')
            is_required_section.style.indent()
            is_required_section.style.bold('[REQUIRED]')
            is_required_section.write(' ')
        if shape.documentation:
            documentation_section = section.add_new_section(
                'param-documentation'
            )
            documentation_section.style.indent()
            # Tagged unions get a note listing the mutually exclusive keys.
            if getattr(shape, 'is_tagged_union', False):
                tagged_union_docs = section.add_new_section(
                    'param-tagged-union-docs'
                )
                note = (
                    '.. note::'
                    ' This is a Tagged Union structure. Only one of the '
                    ' following top level keys can be set: %s. '
                )
                tagged_union_members_str = ', '.join(
                    [f'``{key}``' for key in shape.members.keys()]
                )
                tagged_union_docs.write(note % (tagged_union_members_str))
            documentation_section.include_doc_string(shape.documentation)
            self._add_special_trait_documentation(documentation_section, shape)
        end_param_section = section.add_new_section('end-param')
        end_param_section.style.new_paragraph()

    def _add_special_trait_documentation(self, section, shape):
        # Extra notes driven by shape metadata traits.
        if 'idempotencyToken' in shape.metadata:
            self._append_idempotency_documentation(section)

    def _append_idempotency_documentation(self, section):
        # Idempotency tokens are filled in by botocore when omitted.
        docstring = 'This field is autopopulated if not provided.'
        section.write(docstring)
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/service.py b/py311/lib/python3.11/site-packages/botocore/docs/service.py
new file mode 100644
index 0000000000000000000000000000000000000000..d20a889dc955a71eb3b40e1a4b7eabc549941367
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/service.py
@@ -0,0 +1,133 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+from botocore.docs.client import (
+ ClientContextParamsDocumenter,
+ ClientDocumenter,
+ ClientExceptionsDocumenter,
+)
+from botocore.docs.paginator import PaginatorDocumenter
+from botocore.docs.waiter import WaiterDocumenter
+from botocore.exceptions import DataNotFoundError
+
+
class ServiceDocumenter:
    """Generates the full reST documentation page for one service."""

    def __init__(self, service_name, session, root_docs_path):
        """
        :param service_name: The name of the service to document.
        :param session: The botocore session used to create clients/models.
        :param root_docs_path: Root directory nested doc files are written
            under.
        """
        self._session = session
        self._service_name = service_name
        self._root_docs_path = root_docs_path

        # Dummy region/credentials: the client is only used for
        # introspection, never for real requests.
        self._client = self._session.create_client(
            service_name,
            region_name='us-east-1',
            aws_access_key_id='foo',
            aws_secret_access_key='bar',
        )
        self._event_emitter = self._client.meta.events

        # Ordered top-level sections of the generated page.
        self.sections = [
            'title',
            'client-api',
            'client-exceptions',
            'paginator-api',
            'waiter-api',
            'client-context-params',
        ]

    def document_service(self):
        """Documents an entire service.

        :returns: The reStructured text of the documented service.
        """
        doc_structure = DocumentStructure(
            self._service_name, section_names=self.sections, target='html'
        )
        self.title(doc_structure.get_section('title'))
        self.client_api(doc_structure.get_section('client-api'))
        self.client_exceptions(doc_structure.get_section('client-exceptions'))
        self.paginator_api(doc_structure.get_section('paginator-api'))
        self.waiter_api(doc_structure.get_section('waiter-api'))
        context_params_section = doc_structure.get_section(
            'client-context-params'
        )
        self.client_context_params(context_params_section)
        return doc_structure.flush_structure()

    def title(self, section):
        # Page heading plus an extension hook for customizations.
        section.style.h1(self._client.__class__.__name__)
        self._event_emitter.emit(
            f"docs.title.{self._service_name}", section=section
        )

    def table_of_contents(self, section):
        section.style.table_of_contents(title='Table of Contents', depth=2)

    def client_api(self, section):
        # Shared examples are optional; services without an examples-1
        # model are documented without them.
        examples = None
        try:
            examples = self.get_examples(self._service_name)
        except DataNotFoundError:
            pass

        ClientDocumenter(
            self._client, self._root_docs_path, examples
        ).document_client(section)

    def client_exceptions(self, section):
        ClientExceptionsDocumenter(
            self._client, self._root_docs_path
        ).document_exceptions(section)

    def paginator_api(self, section):
        # Services without pagination data are silently skipped.
        try:
            service_paginator_model = self._session.get_paginator_model(
                self._service_name
            )
        except DataNotFoundError:
            return
        if service_paginator_model._paginator_config:
            paginator_documenter = PaginatorDocumenter(
                self._client, service_paginator_model, self._root_docs_path
            )
            paginator_documenter.document_paginators(section)

    def waiter_api(self, section):
        # Only document waiters when the client defines any.
        if self._client.waiter_names:
            service_waiter_model = self._session.get_waiter_model(
                self._service_name
            )
            waiter_documenter = WaiterDocumenter(
                self._client, service_waiter_model, self._root_docs_path
            )
            waiter_documenter.document_waiters(section)

    def get_examples(self, service_name, api_version=None):
        """Load the shared 'examples-1' model for a service.

        :raises DataNotFoundError: If the service has no examples model.
        """
        loader = self._session.get_component('data_loader')
        examples = loader.load_service_model(
            service_name, 'examples-1', api_version
        )
        return examples['examples']

    def client_context_params(self, section):
        # Document client context parameters, minus any the documenter
        # explicitly omits for this service.
        omitted_params = ClientContextParamsDocumenter.OMITTED_CONTEXT_PARAMS
        params_to_omit = omitted_params.get(self._service_name, [])
        service_model = self._client.meta.service_model
        raw_context_params = service_model.client_context_parameters
        context_params = [
            p for p in raw_context_params if p.name not in params_to_omit
        ]
        if context_params:
            context_param_documenter = ClientContextParamsDocumenter(
                self._service_name, context_params
            )
            context_param_documenter.document_context_params(section)
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/shape.py b/py311/lib/python3.11/site-packages/botocore/docs/shape.py
new file mode 100644
index 0000000000000000000000000000000000000000..640a5d18ef390b9c4456d24233d863974144c947
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/shape.py
@@ -0,0 +1,135 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+
+# NOTE: This class should not be instantiated and its
+# ``traverse_and_document_shape`` method called directly. It should be
+# inherited from a Documenter class with the appropriate methods
+# and attributes.
+from botocore.utils import is_json_value_header
+
+
class ShapeDocumenter:
    """Base traverser that documents a service-model shape.

    NOTE: This class should not be instantiated and its
    ``traverse_and_document_shape`` method called directly. It should be
    inherited from a Documenter class that supplies the
    ``document_shape_type_*`` / ``document_shape_default`` /
    ``document_recursive_shape`` methods and a concrete ``EVENT_NAME``.
    """

    # Component used to build ``docs.<EVENT_NAME>...`` customization event
    # names; overridden by subclasses.
    EVENT_NAME = ''

    def __init__(
        self, service_name, operation_name, event_emitter, context=None
    ):
        self._service_name = service_name
        self._operation_name = operation_name
        self._event_emitter = event_emitter
        self._context = context
        if context is None:
            # 'special_shape_types' maps a special-type label (e.g.
            # 'streaming_input_shape') to the shape marked as such; used by
            # _get_value_for_special_type below.
            self._context = {'special_shape_types': {}}

    def traverse_and_document_shape(
        self,
        section,
        shape,
        history,
        include=None,
        exclude=None,
        name=None,
        is_required=False,
    ):
        """Traverses and documents a shape

        Will take a self class and call its appropriate methods as a shape
        is traversed.

        :param section: The section to document.

        :param history: A list of the names of the shapes that have been
            traversed.

        :type include: Dictionary where keys are parameter names and
            values are the shapes of the parameter names.
        :param include: The parameter shapes to include in the documentation.

        :type exclude: List of the names of the parameters to exclude.
        :param exclude: The names of the parameters to exclude from
            documentation.

        :param name: The name of the shape.

        :param is_required: If the shape is a required member.
        """
        param_type = shape.type_name
        # Event streams get a dedicated renderer regardless of their
        # underlying structure type.
        if getattr(shape, 'serialization', {}).get('eventstream'):
            param_type = 'event_stream'
        if shape.name in history:
            # Cycle detected: render a recursion placeholder instead of
            # traversing forever.
            self.document_recursive_shape(section, shape, name=name)
        else:
            history.append(shape.name)
            # history holds [operation shape, this shape] exactly when this
            # shape is a top-level parameter.
            is_top_level_param = len(history) == 2
            if hasattr(shape, 'is_document_type') and shape.is_document_type:
                param_type = 'document'
            # Dispatch on the (possibly overridden) type name, falling back
            # to the subclass's default handler.
            getattr(
                self,
                f"document_shape_type_{param_type}",
                self.document_shape_default,
            )(
                section,
                shape,
                history=history,
                name=name,
                include=include,
                exclude=exclude,
                is_top_level_param=is_top_level_param,
                is_required=is_required,
            )
            if is_top_level_param:
                # Per-parameter customization hook.
                self._event_emitter.emit(
                    f"docs.{self.EVENT_NAME}.{self._service_name}.{self._operation_name}.{name}",
                    section=section,
                )
            # len(history) == 1 means this is the outermost call (the
            # operation-level shape itself).
            at_overlying_method_section = len(history) == 1
            if at_overlying_method_section:
                self._event_emitter.emit(
                    f"docs.{self.EVENT_NAME}.{self._service_name}.{self._operation_name}.complete-section",
                    section=section,
                )
            history.pop()

    def _get_special_py_default(self, shape):
        """Return the sample default value for a special shape, or None."""
        special_defaults = {
            'document_type': '{...}|[...]|123|123.4|\'string\'|True|None',
            'jsonvalue_header': '{...}|[...]|123|123.4|\'string\'|True|None',
            'streaming_input_shape': 'b\'bytes\'|file',
            'streaming_output_shape': 'StreamingBody()',
            'eventstream_output_shape': 'EventStream()',
        }
        return self._get_value_for_special_type(shape, special_defaults)

    def _get_special_py_type_name(self, shape):
        """Return the human-readable type name for a special shape, or None."""
        special_type_names = {
            'document_type': ':ref:`document`',
            'jsonvalue_header': 'JSON serializable',
            'streaming_input_shape': 'bytes or seekable file-like object',
            'streaming_output_shape': ':class:`.StreamingBody`',
            'eventstream_output_shape': ':class:`.EventStream`',
        }
        return self._get_value_for_special_type(shape, special_type_names)

    def _get_value_for_special_type(self, shape, special_type_map):
        """Look up *shape* in *special_type_map*.

        JSON-value headers and document types are detected structurally;
        everything else is matched against the shapes recorded in the
        context's 'special_shape_types' mapping. Returns None when the
        shape is not special.
        """
        if is_json_value_header(shape):
            return special_type_map['jsonvalue_header']
        if hasattr(shape, 'is_document_type') and shape.is_document_type:
            return special_type_map['document_type']
        for special_type, marked_shape in self._context[
            'special_shape_types'
        ].items():
            if special_type in special_type_map:
                if shape == marked_shape:
                    return special_type_map[special_type]
        return None
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/sharedexample.py b/py311/lib/python3.11/site-packages/botocore/docs/sharedexample.py
new file mode 100644
index 0000000000000000000000000000000000000000..29d3df5fc969daf26bba380b571897195387ad78
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/sharedexample.py
@@ -0,0 +1,227 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import numbers
+import re
+
+from botocore.docs.utils import escape_controls
+from botocore.utils import parse_timestamp
+
+
class SharedExampleDocumenter:
    """Renders a modeled "shared example" as a runnable code sample.

    Walks the example's input/output values in lockstep with the
    operation's input/output shapes so each value can be formatted
    according to its modeled type.
    """

    def document_shared_example(
        self, example, prefix, section, operation_model
    ):
        """Documents a single shared example based on its definition.

        :param example: The model of the example

        :param prefix: The prefix to use in the method example.

        :param section: The section to write to.

        :param operation_model: The model of the operation used in the example
        """
        section.style.new_paragraph()
        section.write(example.get('description'))
        section.style.new_line()
        self.document_input(
            section, example, prefix, operation_model.input_shape
        )
        self.document_output(section, example, operation_model.output_shape)

    def document_input(self, section, example, prefix, shape):
        """Writes the example's request as a code block.

        :param prefix: Call prefix (e.g. ``response = client.foo``) written
            before the parameter list; skipped when None.
        :param shape: The operation's input shape (may be None).
        """
        input_section = section.add_new_section('input')
        input_section.style.start_codeblock()
        if prefix is not None:
            input_section.write(prefix)
        params = example.get('input', {})
        # Comments are keyed by dotted/indexed paths under 'input'.
        comments = example.get('comments')
        if comments:
            comments = comments.get('input')
        param_section = input_section.add_new_section('parameters')
        self._document_params(param_section, params, comments, [], shape)
        closing_section = input_section.add_new_section('input-close')
        closing_section.style.new_line()
        closing_section.style.new_line()
        closing_section.write('print(response)')
        closing_section.style.end_codeblock()

    def document_output(self, section, example, shape):
        """Writes the example's expected response as a code block.

        :param shape: The operation's output shape (may be None).
        """
        output_section = section.add_new_section('output')
        output_section.style.new_line()
        output_section.write('Expected Output:')
        output_section.style.new_line()
        output_section.style.start_codeblock()
        params = example.get('output', {})

        # There might not be an output, but we will return metadata anyway.
        # NOTE(review): this inserts the key into the example's own 'output'
        # dict (shared mutation) — presumably harmless since each example is
        # documented once; confirm if examples are reused.
        params['ResponseMetadata'] = {"...": "..."}
        comments = example.get('comments')
        if comments:
            comments = comments.get('output')
        self._document_dict(output_section, params, comments, [], shape, True)
        closing_section = output_section.add_new_section('output-close')
        closing_section.style.end_codeblock()

    def _document(self, section, value, comments, path, shape):
        """
        :param section: The section to add the docs to.

        :param value: The input / output values representing the parameters that
            are included in the example.

        :param comments: The dictionary containing all the comments to be
            applied to the example.

        :param path: A list describing where the documenter is in traversing the
            parameters. This is used to find the equivalent location
            in the comments dictionary.
        """
        # Dispatch on the Python type of the value, not the shape; note that
        # a numeric value wins over a timestamp shape, so epoch-style
        # timestamps are rendered as plain numbers.
        if isinstance(value, dict):
            self._document_dict(section, value, comments, path, shape)
        elif isinstance(value, list):
            self._document_list(section, value, comments, path, shape)
        elif isinstance(value, numbers.Number):
            self._document_number(section, value, path)
        elif shape and shape.type_name == 'timestamp':
            self._document_datetime(section, value, path)
        else:
            self._document_str(section, value, path)

    def _document_dict(
        self, section, value, comments, path, shape, top_level=False
    ):
        """Render a dict value as ``{'key': value, ...}``.

        :param top_level: True only for the outermost response dict; it
            suppresses the trailing comma after the closing brace.
        """
        dict_section = section.add_new_section('dict-value')
        self._start_nested_value(dict_section, '{')
        for key, val in value.items():
            path.append(f'.{key}')
            item_section = dict_section.add_new_section(key)
            item_section.style.new_line()
            item_comment = self._get_comment(path, comments)
            if item_comment:
                item_section.write(item_comment)
                item_section.style.new_line()
            item_section.write(f"'{key}': ")

            # Shape could be none if there is no output besides ResponseMetadata
            item_shape = None
            if shape:
                if shape.type_name == 'structure':
                    item_shape = shape.members.get(key)
                elif shape.type_name == 'map':
                    item_shape = shape.value
            self._document(item_section, val, comments, path, item_shape)
            path.pop()
        dict_section_end = dict_section.add_new_section('ending-brace')
        self._end_nested_value(dict_section_end, '}')
        if not top_level:
            dict_section_end.write(',')

    def _document_params(self, section, value, comments, path, shape):
        """Render the request parameters as ``key=value`` keyword arguments."""
        param_section = section.add_new_section('param-values')
        self._start_nested_value(param_section, '(')
        for key, val in value.items():
            path.append(f'.{key}')
            item_section = param_section.add_new_section(key)
            item_section.style.new_line()
            item_comment = self._get_comment(path, comments)
            if item_comment:
                item_section.write(item_comment)
                item_section.style.new_line()
            item_section.write(key + '=')

            # Shape could be none if there are no input parameters
            item_shape = None
            if shape:
                item_shape = shape.members.get(key)
            self._document(item_section, val, comments, path, item_shape)
            path.pop()
        param_section_end = param_section.add_new_section('ending-parenthesis')
        self._end_nested_value(param_section_end, ')')

    def _document_list(self, section, value, comments, path, shape):
        """Render a list value as ``[item, ...]`` using the member shape."""
        list_section = section.add_new_section('list-section')
        self._start_nested_value(list_section, '[')
        item_shape = shape.member
        for index, val in enumerate(value):
            item_section = list_section.add_new_section(index)
            item_section.style.new_line()
            path.append(f'[{index}]')
            item_comment = self._get_comment(path, comments)
            if item_comment:
                item_section.write(item_comment)
                item_section.style.new_line()
            self._document(item_section, val, comments, path, item_shape)
            path.pop()
        list_section_end = list_section.add_new_section('ending-bracket')
        self._end_nested_value(list_section_end, '],')

    def _document_str(self, section, value, path):
        # We do the string conversion because this might accept a type that
        # we don't specifically address.
        safe_value = escape_controls(value)
        section.write(f"'{safe_value}',")

    def _document_number(self, section, value, path):
        """Render a numeric value verbatim with a trailing comma."""
        section.write(f"{str(value)},")

    def _document_datetime(self, section, value, path):
        """Render a timestamp value as a ``datetime(...)`` constructor call."""
        datetime_tuple = parse_timestamp(value).timetuple()
        datetime_str = str(datetime_tuple[0])
        for i in range(1, len(datetime_tuple)):
            datetime_str += ", " + str(datetime_tuple[i])
        section.write(f"datetime({datetime_str}),")

    def _get_comment(self, path, comments):
        """Look up the comment for the current traversal path.

        The path list like ``['.Foo', '[0]', '.Bar']`` is joined and the
        leading dot stripped to match the keys used in the example model.
        """
        key = re.sub(r'^\.', '', ''.join(path))
        if comments and key in comments:
            return '# ' + comments[key]
        else:
            return ''

    def _start_nested_value(self, section, start):
        """Write an opening delimiter and indent two levels."""
        section.write(start)
        section.style.indent()
        section.style.indent()

    def _end_nested_value(self, section, end):
        """Dedent two levels and write a closing delimiter on a new line."""
        section.style.dedent()
        section.style.dedent()
        section.style.new_line()
        section.write(end)
+
+
def document_shared_examples(
    section, operation_model, example_prefix, shared_examples
):
    """Documents the shared examples

    :param section: The section to write to.

    :param operation_model: The model of the operation.

    :param example_prefix: The prefix to use in the method example.

    :param shared_examples: The shared JSON examples from the model.
    """
    examples_section = section.add_new_section('shared-examples')
    examples_section.style.new_paragraph()
    examples_section.style.bold('Examples')
    documenter = SharedExampleDocumenter()
    for shared_example in shared_examples:
        # Each example gets its own sub-section keyed by the example id.
        example_section = examples_section.add_new_section(
            shared_example['id']
        )
        documenter.document_shared_example(
            example=shared_example,
            section=example_section,
            prefix=example_prefix,
            operation_model=operation_model,
        )
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/translator.py b/py311/lib/python3.11/site-packages/botocore/docs/translator.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b0a308930c6d3d525ceb5cc63abbe43b5c7559e
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/translator.py
@@ -0,0 +1,62 @@
+# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+from docutils import nodes
+from sphinx.locale import admonitionlabels
+from sphinx.writers.html5 import HTML5Translator as SphinxHTML5Translator
+
+
class BotoHTML5Translator(SphinxHTML5Translator):
    """Extension of Sphinx's ``HTML5Translator`` for Botocore documentation."""

    # Bolded paragraph texts that must NOT be promoted to headings by
    # ``visit_paragraph`` below.
    IGNORE_IMPLICIT_HEADINGS = [
        '[REQUIRED]',
    ]

    def visit_admonition(self, node, name=""):
        """Uses the h3 tag for admonition titles instead of the p tag."""
        self.body.append(
            self.starttag(node, "div", CLASS=("admonition " + name))
        )
        if name:
            # BUGFIX: the original literal was corrupted (the <h3> markup
            # had been stripped, leaving a broken multi-line f-string).
            # Reconstructed to emit the admonition label as an h3 heading.
            title = (
                f'<h3 class="admonition-title"> {admonitionlabels[name]} </h3>'
            )
            self.body.append(title)

    def is_implicit_heading(self, node):
        """Determines if a node is an implicit heading.

        An implicit heading is represented by a paragraph node whose only
        child is a strong node with text that isn't in
        `IGNORE_IMPLICIT_HEADINGS`.
        """
        return (
            len(node) == 1
            and isinstance(node[0], nodes.strong)
            and len(node[0]) == 1
            and isinstance(node[0][0], nodes.Text)
            and node[0][0].astext() not in self.IGNORE_IMPLICIT_HEADINGS
        )

    def visit_paragraph(self, node):
        """Visit a paragraph HTML element.

        Replaces implicit headings with an h3 tag and defers to default
        behavior for normal paragraph elements.
        """
        if self.is_implicit_heading(node):
            text = node[0][0]
            # BUGFIX: reconstructed corrupted literal — render the bolded
            # text as an h3 heading instead of a paragraph.
            self.body.append(f'<h3>{text}</h3>\n')
            # Do not visit the current node's children or call its depart
            # method.
            raise nodes.SkipNode
        else:
            super().visit_paragraph(node)
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/utils.py b/py311/lib/python3.11/site-packages/botocore/docs/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..161e260229155bbc38b4503475240bc4295fdf4c
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/utils.py
@@ -0,0 +1,225 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import re
+from collections import namedtuple
+
+
def py_type_name(type_name):
    """Get the Python type name for a given model type.

    Model types with no entry in the translation table (e.g. ``list``,
    ``string``) are returned unchanged.

    >>> py_type_name('list')
    'list'
    >>> py_type_name('structure')
    'dict'

    :rtype: string
    """
    translation = {
        'blob': 'bytes',
        'character': 'string',
        'double': 'float',
        'long': 'integer',
        'map': 'dict',
        'structure': 'dict',
        'timestamp': 'datetime',
    }
    return translation.get(type_name, type_name)
+
+
def py_default(type_name):
    """Get the Python default value for a given model type.

    Unrecognized types fall back to ``'...'``.

    >>> py_default('string')
    "'string'"
    >>> py_default('list')
    '[...]'
    >>> py_default('unknown')
    '...'

    :rtype: string
    """
    defaults = {
        'double': '123.0',
        'long': '123',
        'integer': '123',
        'string': "'string'",
        'blob': "b'bytes'",
        'boolean': 'True|False',
        'list': '[...]',
        'map': '{...}',
        'structure': '{...}',
        'timestamp': 'datetime(2015, 1, 1)',
    }
    return defaults.get(type_name, '...')
+
+
def get_official_service_name(service_model):
    """Generate the official name of an AWS Service

    Appends the service abbreviation in parentheses when it adds
    information not already present in the full name.

    :param service_model: The service model representing the service
    """
    metadata = service_model.metadata
    official_name = metadata.get('serviceFullName')
    short_name = metadata.get('serviceAbbreviation', '')
    # Strip a leading vendor prefix ("Amazon " / "AWS ") from the
    # abbreviation; both checks run in sequence, as before.
    for prefix, prefix_len in (('Amazon', 7), ('AWS', 4)):
        if short_name.startswith(prefix):
            short_name = short_name[prefix_len:]
    if short_name and short_name.lower() not in official_name.lower():
        official_name += f' ({short_name})'
    return official_name
+
+
# Field layout shared with the public DocumentedShape class below.
_DocumentedShape = namedtuple(
    'DocumentedShape',
    [
        'name',
        'type_name',
        'documentation',
        'metadata',
        'members',
        'required_members',
    ],
)


class DocumentedShape(_DocumentedShape):
    """Use this class to inject new shapes into a model for documentation"""

    def __new__(
        cls,
        name,
        type_name,
        documentation,
        metadata=None,
        members=None,
        required_members=None,
    ):
        # Substitute a fresh empty list for each omitted collection so that
        # instances never share mutable default state.
        metadata = [] if metadata is None else metadata
        members = [] if members is None else members
        required_members = (
            [] if required_members is None else required_members
        )
        return super().__new__(
            cls,
            name,
            type_name,
            documentation,
            metadata,
            members,
            required_members,
        )
+
+
class AutoPopulatedParam:
    """Documents a parameter that botocore fills in automatically."""

    # Default note appended to the parameter's documentation when no custom
    # description is supplied.
    _DEFAULT_DESCRIPTION = (
        'Please note that this parameter is automatically populated '
        'if it is not provided. Including this parameter is not '
        'required\n'
    )

    def __init__(self, name, param_description=None):
        self.name = name
        if param_description is None:
            param_description = self._DEFAULT_DESCRIPTION
        self.param_description = param_description

    def document_auto_populated_param(self, event_name, section, **kwargs):
        """Documents auto populated parameters

        It will remove any required marks for the parameter, remove the
        parameter from the example, and add a snippet about the parameter
        being autopopulated in the description.
        """
        if event_name.startswith('docs.request-params'):
            self._handle_request_params(section)
        elif event_name.startswith('docs.request-example'):
            self._handle_request_example(section)

    def _handle_request_params(self, section):
        # Drop the "required" marker and append the auto-population note.
        if self.name not in section.available_sections:
            return
        param_section = section.get_section(self.name)
        if 'is-required' in param_section.available_sections:
            param_section.delete_section('is-required')
        param_section.get_section('param-documentation').writeln(
            self.param_description
        )

    def _handle_request_example(self, section):
        # Remove the parameter from the generated request example.
        structure_section = section.get_section('structure-value')
        if self.name in structure_section.available_sections:
            structure_section.delete_section(self.name)
+
+
class HideParamFromOperations:
    """Hides a single parameter from multiple operations.

    This method will remove a parameter from documentation and from
    examples. This method is typically used for things that are
    automatically populated because a user would be unable to provide
    a value (e.g., a checksum of a serialized XML request body)."""

    def __init__(self, service_name, parameter_name, operation_names):
        """
        :type service_name: str
        :param service_name: Name of the service to modify.

        :type parameter_name: str
        :param parameter_name: Name of the parameter to modify.

        :type operation_names: list
        :param operation_names: Operation names to modify.
        """
        self._parameter_name = parameter_name
        # Pre-compute the exact event names this handler reacts to.
        self._params_events = {
            f'docs.request-params.{service_name}.{op}.complete-section'
            for op in operation_names
        }
        self._example_events = {
            f'docs.request-example.{service_name}.{op}.complete-section'
            for op in operation_names
        }

    def hide_param(self, event_name, section, **kwargs):
        """Delete the parameter's section when *event_name* matches."""
        if event_name in self._example_events:
            # Examples nest parameters one level deeper.
            section = section.get_section('structure-value')
        elif event_name not in self._params_events:
            return
        if self._parameter_name in section.available_sections:
            section.delete_section(self._parameter_name)
+
+
class AppendParamDocumentation:
    """Appends documentation to a specific parameter"""

    def __init__(self, parameter_name, doc_string):
        self._parameter_name = parameter_name
        self._doc_string = doc_string

    def append_documentation(self, event_name, section, **kwargs):
        """Append the stored doc string to the parameter's documentation,
        doing nothing when the parameter has no section."""
        if self._parameter_name not in section.available_sections:
            return
        param_section = section.get_section(self._parameter_name)
        param_section.get_section('param-documentation').writeln(
            self._doc_string
        )
+
+
# Mapping of raw control characters to their printable escape sequences.
_CONTROLS = {
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
    '\b': '\\b',
    '\f': '\\f',
}
# Translation table built once at import time; str.translate performs the
# whole substitution in a single C-level pass, replacing the previous
# regex-based implementation with an equivalent but simpler/faster one.
_CONTROLS_TRANSLATION = str.maketrans(_CONTROLS)


def escape_controls(value):
    """Return *value* with control characters replaced by escape sequences.

    :type value: str
    :param value: Text that may contain raw control characters.
    :rtype: str
    """
    return value.translate(_CONTROLS_TRANSLATION)
diff --git a/py311/lib/python3.11/site-packages/botocore/docs/waiter.py b/py311/lib/python3.11/site-packages/botocore/docs/waiter.py
new file mode 100644
index 0000000000000000000000000000000000000000..2918602d2f471be53be47357bd2cf4326fa53dab
--- /dev/null
+++ b/py311/lib/python3.11/site-packages/botocore/docs/waiter.py
@@ -0,0 +1,180 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import os
+
+from botocore import xform_name
+from botocore.compat import OrderedDict
+from botocore.docs.bcdoc.restdoc import DocumentStructure
+from botocore.docs.method import document_model_driven_method
+from botocore.docs.utils import DocumentedShape
+from botocore.utils import get_service_module_name
+
+
class WaiterDocumenter:
    """Generates ReST documentation for a service's waiters."""

    def __init__(self, client, service_waiter_model, root_docs_path):
        self._client = client
        self._client_class_name = self._client.__class__.__name__
        self._service_name = self._client.meta.service_model.service_name
        self._service_waiter_model = service_waiter_model
        self._root_docs_path = root_docs_path
        self._USER_GUIDE_LINK = (
            'https://boto3.amazonaws.com/'
            'v1/documentation/api/latest/guide/clients.html#waiters'
        )

    def document_waiters(self, section):
        """Documents the various waiters for a service.

        :param section: The section to write to.
        """
        section.style.h2('Waiters')
        self._add_overview(section)
        section.style.new_line()
        section.writeln('The available waiters are:')
        section.style.toctree()
        for waiter_name in self._service_waiter_model.waiter_names:
            section.style.tocitem(f'{self._service_name}/waiter/{waiter_name}')
            # Create a new DocumentStructure for each waiter and add contents.
            waiter_doc_structure = DocumentStructure(
                waiter_name, target='html'
            )
            self._add_single_waiter(waiter_doc_structure, waiter_name)
            # Write waiters in individual/nested files.
            # Path: <root>/reference/services/<service>/waiter/<waiter>.rst
            waiter_dir_path = os.path.join(
                self._root_docs_path, self._service_name, 'waiter'
            )
            waiter_doc_structure.write_to_file(waiter_dir_path, waiter_name)

    def _add_single_waiter(self, section, waiter_name):
        """Render one waiter's page: breadcrumb, title, class directive,
        instantiation example, and the wait() method docs."""
        breadcrumb_section = section.add_new_section('breadcrumb')
        breadcrumb_section.style.ref(
            self._client_class_name, f'../../{self._service_name}'
        )
        breadcrumb_section.write(f' / Waiter / {waiter_name}')
        section.add_title_section(waiter_name)
        waiter_section = section.add_new_section(waiter_name)
        waiter_section.style.start_sphinx_py_class(
            class_name=f"{self._client_class_name}.Waiter.{waiter_name}"
        )

        # Add example on how to instantiate waiter.
        waiter_section.style.start_codeblock()
        waiter_section.style.new_line()
        waiter_section.write(
            f'waiter = client.get_waiter(\'{xform_name(waiter_name)}\')'
        )
        waiter_section.style.end_codeblock()

        # Add information on the wait() method
        waiter_section.style.new_line()
        document_wait_method(
            section=waiter_section,
            waiter_name=waiter_name,
            event_emitter=self._client.meta.events,
            service_model=self._client.meta.service_model,
            service_waiter_model=self._service_waiter_model,
        )

    def _add_overview(self, section):
        """Write the boilerplate overview paragraph linking to the waiters
        user guide."""
        section.style.new_line()
        section.write(
            'Waiters are available on a client instance '
            'via the ``get_waiter`` method. For more detailed instructions '
            # BUGFIX: corrected user-facing typo "usage or waiters" ->
            # "usage of waiters".
            'and examples on the usage of waiters, see the '
            'waiters '
        )
        section.style.external_link(
            title='user guide',
            link=self._USER_GUIDE_LINK,
        )
        section.write('.')
        section.style.new_line()
+
+
+def document_wait_method(
+ section,
+ waiter_name,
+ event_emitter,
+ service_model,
+ service_waiter_model,
+ include_signature=True,
+):
+ """Documents a the wait method of a waiter
+
+ :param section: The section to write to
+
+ :param waiter_name: The name of the waiter
+
+ :param event_emitter: The event emitter to use to emit events
+
+ :param service_model: The service model
+
+ :param service_waiter_model: The waiter model associated to the service
+
+ :param include_signature: Whether or not to include the signature.
+ It is useful for generating docstrings.
+ """
+ waiter_model = service_waiter_model.get_waiter(waiter_name)
+ operation_model = service_model.operation_model(waiter_model.operation)
+
+ waiter_config_members = OrderedDict()
+
+ waiter_config_members['Delay'] = DocumentedShape(
+ name='Delay',
+ type_name='integer',
+ documentation=(
+ '
The amount of time in seconds to wait between '
+ f'attempts. Default: {waiter_model.delay}