Add files using upload-large-folder tool
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/cache.py +225 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/configuration.py +280 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/debug.py +201 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/download.py +146 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/freeze.py +109 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/hash.py +59 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/help.py +41 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/index.py +139 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/install.py +783 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/search.py +172 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/show.py +217 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/glibc.py +101 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/subprocess.py +245 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py +296 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py +104 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/compat.py +1137 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/index.py +508 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/manifest.py +384 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/markers.py +162 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/metadata.py +1031 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/resources.py +358 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/scripts.py +447 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/util.py +1984 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/version.py +750 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/w32.exe +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/wheel.py +1100 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/_manylinux.py +262 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/specifiers.py +1009 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/utils.py +174 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/urllib3/__init__.py +102 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/urllib3/_version.py +2 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc
ADDED
Binary file (17.7 kB)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/cache.py
ADDED
@@ -0,0 +1,225 @@
+import os
+import textwrap
+from optparse import Values
+from typing import Any, List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils import filesystem
+from pip._internal.utils.logging import getLogger
+
+logger = getLogger(__name__)
+
+
+class CacheCommand(Command):
+    """
+    Inspect and manage pip's wheel cache.
+
+    Subcommands:
+
+    - dir: Show the cache directory.
+    - info: Show information about the cache.
+    - list: List filenames of packages stored in the cache.
+    - remove: Remove one or more package from the cache.
+    - purge: Remove all items from the cache.
+
+    ``<pattern>`` can be a glob expression or a package name.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog dir
+        %prog info
+        %prog list [<pattern>] [--format=[human, abspath]]
+        %prog remove <pattern>
+        %prog purge
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--format",
+            action="store",
+            dest="list_format",
+            default="human",
+            choices=("human", "abspath"),
+            help="Select the output format among: human (default) or abspath",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "dir": self.get_cache_dir,
+            "info": self.get_cache_info,
+            "list": self.list_cache_items,
+            "remove": self.remove_cache_items,
+            "purge": self.purge_cache,
+        }
+
+        if not options.cache_dir:
+            logger.error("pip cache commands can not function since cache is disabled.")
+            return ERROR
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        logger.info(options.cache_dir)
+
+    def get_cache_info(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        num_http_files = len(self._find_http_files(options))
+        num_packages = len(self._find_wheels(options, "*"))
+
+        http_cache_location = self._cache_dir(options, "http-v2")
+        old_http_cache_location = self._cache_dir(options, "http")
+        wheels_cache_location = self._cache_dir(options, "wheels")
+        http_cache_size = filesystem.format_size(
+            filesystem.directory_size(http_cache_location)
+            + filesystem.directory_size(old_http_cache_location)
+        )
+        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
+
+        message = (
+            textwrap.dedent(
+                """
+                Package index page cache location (pip v23.3+): {http_cache_location}
+                Package index page cache location (older pips): {old_http_cache_location}
+                Package index page cache size: {http_cache_size}
+                Number of HTTP files: {num_http_files}
+                Locally built wheels location: {wheels_cache_location}
+                Locally built wheels size: {wheels_cache_size}
+                Number of locally built wheels: {package_count}
+                """  # noqa: E501
+            )
+            .format(
+                http_cache_location=http_cache_location,
+                old_http_cache_location=old_http_cache_location,
+                http_cache_size=http_cache_size,
+                num_http_files=num_http_files,
+                wheels_cache_location=wheels_cache_location,
+                package_count=num_packages,
+                wheels_cache_size=wheels_cache_size,
+            )
+            .strip()
+        )
+
+        logger.info(message)
+
+    def list_cache_items(self, options: Values, args: List[Any]) -> None:
+        if len(args) > 1:
+            raise CommandError("Too many arguments")
+
+        if args:
+            pattern = args[0]
+        else:
+            pattern = "*"
+
+        files = self._find_wheels(options, pattern)
+        if options.list_format == "human":
+            self.format_for_human(files)
+        else:
+            self.format_for_abspath(files)
+
+    def format_for_human(self, files: List[str]) -> None:
+        if not files:
+            logger.info("No locally built wheels cached.")
+            return
+
+        results = []
+        for filename in files:
+            wheel = os.path.basename(filename)
+            size = filesystem.format_file_size(filename)
+            results.append(f" - {wheel} ({size})")
+        logger.info("Cache contents:\n")
+        logger.info("\n".join(sorted(results)))
+
+    def format_for_abspath(self, files: List[str]) -> None:
+        if files:
+            logger.info("\n".join(sorted(files)))
+
+    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
+        if len(args) > 1:
+            raise CommandError("Too many arguments")
+
+        if not args:
+            raise CommandError("Please provide a pattern")
+
+        files = self._find_wheels(options, args[0])
+
+        no_matching_msg = "No matching packages"
+        if args[0] == "*":
+            # Only fetch http files if no specific pattern given
+            files += self._find_http_files(options)
+        else:
+            # Add the pattern to the log message
+            no_matching_msg += f' for pattern "{args[0]}"'
+
+        if not files:
+            logger.warning(no_matching_msg)
+
+        for filename in files:
+            os.unlink(filename)
+            logger.verbose("Removed %s", filename)
+        logger.info("Files removed: %s", len(files))
+
+    def purge_cache(self, options: Values, args: List[Any]) -> None:
+        if args:
+            raise CommandError("Too many arguments")
+
+        return self.remove_cache_items(options, ["*"])
+
+    def _cache_dir(self, options: Values, subdir: str) -> str:
+        return os.path.join(options.cache_dir, subdir)
+
+    def _find_http_files(self, options: Values) -> List[str]:
+        old_http_dir = self._cache_dir(options, "http")
+        new_http_dir = self._cache_dir(options, "http-v2")
+        return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
+            new_http_dir, "*"
+        )
+
+    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
+        wheel_dir = self._cache_dir(options, "wheels")
+
+        # The wheel filename format, as specified in PEP 427, is:
+        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
+        #
+        # Additionally, non-alphanumeric values in the distribution are
+        # normalized to underscores (_), meaning hyphens can never occur
+        # before `-{version}`.
+        #
+        # Given that information:
+        # - If the pattern we're given contains a hyphen (-), the user is
+        #   providing at least the version. Thus, we can just append `*.whl`
+        #   to match the rest of it.
+        # - If the pattern we're given doesn't contain a hyphen (-), the
+        #   user is only providing the name. Thus, we append `-*.whl` to
+        #   match the hyphen before the version, followed by anything else.
+        #
+        # PEP 427: https://www.python.org/dev/peps/pep-0427/
+        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
+
+        return filesystem.find_files(wheel_dir, pattern)
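The hyphen heuristic in _find_wheels above is easy to check in isolation. A minimal sketch, assuming fnmatch-style globbing like pip's filesystem.find_files; wheel_glob and the cached filenames are invented for illustration:

import fnmatch

def wheel_glob(pattern: str) -> str:
    # Same rule as CacheCommand._find_wheels: a hyphen means the user gave
    # at least a version, so only "*.whl" is appended; otherwise "-*.whl"
    # anchors the match at the name/version separator.
    return pattern + ("*.whl" if "-" in pattern else "-*.whl")

# Hypothetical cache contents, for illustration only.
cached = [
    "requests-2.31.0-py3-none-any.whl",
    "requests_toolbelt-1.0.0-py2.py3-none-any.whl",
]
assert fnmatch.filter(cached, wheel_glob("requests")) == cached[:1]
assert fnmatch.filter(cached, wheel_glob("requests-2.31.0")) == cached[:1]

Because distribution names are normalized to underscores, the bare-name glob cannot accidentally match a longer name such as requests_toolbelt.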
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/configuration.py
ADDED
@@ -0,0 +1,280 @@
+import logging
+import os
+import subprocess
+from optparse import Values
+from typing import Any, List, Optional
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+    Configuration,
+    Kind,
+    get_configuration_files,
+    kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_prog, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+    """
+    Manage local and global configuration.
+
+    Subcommands:
+
+    - list: List the active configuration (or from the file specified)
+    - edit: Edit the configuration file in an editor
+    - get: Get the value associated with command.option
+    - set: Set the command.option=value
+    - unset: Unset the value associated with command.option
+    - debug: List the configuration files and values defined under them
+
+    Configuration keys should be dot separated command and option name,
+    with the special prefix "global" affecting any command. For example,
+    "pip config set global.index-url https://example.org/" would configure
+    the index url for all commands, but "pip config set download.timeout 10"
+    would configure a 10 second timeout only for "pip download" commands.
+
+    If none of --user, --global and --site are passed, a virtual
+    environment configuration file is used if one is active and the file
+    exists. Otherwise, all modifications happen to the user file by
+    default.
+    """
+
+    ignore_require_venv = True
+    usage = """
+        %prog [<file-option>] list
+        %prog [<file-option>] [--editor <editor-path>] edit
+
+        %prog [<file-option>] get command.option
+        %prog [<file-option>] set command.option value
+        %prog [<file-option>] unset command.option
+        %prog [<file-option>] debug
+    """
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "--editor",
+            dest="editor",
+            action="store",
+            default=None,
+            help=(
+                "Editor to use to edit the file. Uses VISUAL or EDITOR "
+                "environment variables if not provided."
+            ),
+        )
+
+        self.cmd_opts.add_option(
+            "--global",
+            dest="global_file",
+            action="store_true",
+            default=False,
+            help="Use the system-wide configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user_file",
+            action="store_true",
+            default=False,
+            help="Use the user configuration file only",
+        )
+
+        self.cmd_opts.add_option(
+            "--site",
+            dest="site_file",
+            action="store_true",
+            default=False,
+            help="Use the current environment configuration file only",
+        )
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        handlers = {
+            "list": self.list_values,
+            "edit": self.open_in_editor,
+            "get": self.get_name,
+            "set": self.set_name_value,
+            "unset": self.unset_name,
+            "debug": self.list_config_values,
+        }
+
+        # Determine action
+        if not args or args[0] not in handlers:
+            logger.error(
+                "Need an action (%s) to perform.",
+                ", ".join(sorted(handlers)),
+            )
+            return ERROR
+
+        action = args[0]
+
+        # Determine which configuration files are to be loaded
+        #    Depends on whether the command is modifying.
+        try:
+            load_only = self._determine_file(
+                options, need_value=(action in ["get", "set", "unset", "edit"])
+            )
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        # Load a new configuration
+        self.configuration = Configuration(
+            isolated=options.isolated_mode, load_only=load_only
+        )
+        self.configuration.load()
+
+        # Error handling happens here, not in the action-handlers.
+        try:
+            handlers[action](options, args[1:])
+        except PipError as e:
+            logger.error(e.args[0])
+            return ERROR
+
+        return SUCCESS
+
+    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
+        file_options = [
+            key
+            for key, value in (
+                (kinds.USER, options.user_file),
+                (kinds.GLOBAL, options.global_file),
+                (kinds.SITE, options.site_file),
+            )
+            if value
+        ]
+
+        if not file_options:
+            if not need_value:
+                return None
+            # Default to user, unless there's a site file.
+            elif any(
+                os.path.exists(site_config_file)
+                for site_config_file in get_configuration_files()[kinds.SITE]
+            ):
+                return kinds.SITE
+            else:
+                return kinds.USER
+        elif len(file_options) == 1:
+            return file_options[0]
+
+        raise PipError(
+            "Need exactly one file to operate upon "
+            "(--user, --site, --global) to perform."
+        )
+
+    def list_values(self, options: Values, args: List[str]) -> None:
+        self._get_n_args(args, "list", n=0)
+
+        for key, value in sorted(self.configuration.items()):
+            write_output("%s=%r", key, value)
+
+    def get_name(self, options: Values, args: List[str]) -> None:
+        key = self._get_n_args(args, "get [name]", n=1)
+        value = self.configuration.get_value(key)
+
+        write_output("%s", value)
+
+    def set_name_value(self, options: Values, args: List[str]) -> None:
+        key, value = self._get_n_args(args, "set [name] [value]", n=2)
+        self.configuration.set_value(key, value)
+
+        self._save_configuration()
+
+    def unset_name(self, options: Values, args: List[str]) -> None:
+        key = self._get_n_args(args, "unset [name]", n=1)
+        self.configuration.unset_value(key)
+
+        self._save_configuration()
+
+    def list_config_values(self, options: Values, args: List[str]) -> None:
+        """List config key-value pairs across different config files"""
+        self._get_n_args(args, "debug", n=0)
+
+        self.print_env_var_values()
+        # Iterate over config files and print if they exist, and the
+        # key-value pairs present in them if they do
+        for variant, files in sorted(self.configuration.iter_config_files()):
+            write_output("%s:", variant)
+            for fname in files:
+                with indent_log():
+                    file_exists = os.path.exists(fname)
+                    write_output("%s, exists: %r", fname, file_exists)
+                    if file_exists:
+                        self.print_config_file_values(variant)
+
+    def print_config_file_values(self, variant: Kind) -> None:
+        """Get key-value pairs from the file of a variant"""
+        for name, value in self.configuration.get_values_in_config(variant).items():
+            with indent_log():
+                write_output("%s: %s", name, value)
+
+    def print_env_var_values(self) -> None:
+        """Get key-values pairs present as environment variables"""
+        write_output("%s:", "env_var")
+        with indent_log():
+            for key, value in sorted(self.configuration.get_environ_vars()):
+                env_var = f"PIP_{key.upper()}"
+                write_output("%s=%r", env_var, value)
+
+    def open_in_editor(self, options: Values, args: List[str]) -> None:
+        editor = self._determine_editor(options)
+
+        fname = self.configuration.get_file_to_edit()
+        if fname is None:
+            raise PipError("Could not determine appropriate file.")
+        elif '"' in fname:
+            # This shouldn't happen, unless we see a username like that.
+            # If that happens, we'd appreciate a pull request fixing this.
+            raise PipError(
+                f'Can not open an editor for a file name containing "\n{fname}'
+            )
+
+        try:
+            subprocess.check_call(f'{editor} "{fname}"', shell=True)
+        except FileNotFoundError as e:
+            if not e.filename:
+                e.filename = editor
+            raise
+        except subprocess.CalledProcessError as e:
+            raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
+
+    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
+        """Helper to make sure the command got the right number of arguments"""
+        if len(args) != n:
+            msg = (
+                f"Got unexpected number of arguments, expected {n}. "
+                f'(example: "{get_prog()} config {example}")'
+            )
+            raise PipError(msg)
+
+        if n == 1:
+            return args[0]
+        else:
+            return args
+
+    def _save_configuration(self) -> None:
+        # We successfully ran a modifying command. Need to save the
+        # configuration.
+        try:
+            self.configuration.save()
+        except Exception:
+            logger.exception(
+                "Unable to save configuration. Please report this as a bug."
+            )
+            raise PipError("Internal Error.")
+
+    def _determine_editor(self, options: Values) -> str:
+        if options.editor is not None:
+            return options.editor
+        elif "VISUAL" in os.environ:
+            return os.environ["VISUAL"]
+        elif "EDITOR" in os.environ:
+            return os.environ["EDITOR"]
+        else:
+            raise PipError("Could not determine editor to use.")
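The dot-separated key convention described in the ConfigurationCommand docstring splits on the first period. A minimal sketch of that split with made-up keys; split_config_key is a hypothetical helper, and pip's own parsing in pip._internal.configuration may differ in detail:

def split_config_key(key: str):
    # "global.index-url" -> ("global", "index-url"); the special "global"
    # command part applies the option to every command.
    command, _, option = key.partition(".")
    return command, option

assert split_config_key("global.index-url") == ("global", "index-url")
assert split_config_key("download.timeout") == ("download", "timeout")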
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/debug.py
ADDED
@@ -0,0 +1,201 @@
+import locale
+import logging
+import os
+import sys
+from optparse import Values
+from types import ModuleType
+from typing import Any, Dict, List, Optional
+
+import pip._vendor
+from pip._vendor.certifi import where
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.configuration import Configuration
+from pip._internal.metadata import get_environment
+from pip._internal.utils.compat import open_text_resource
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_pip_version
+
+logger = logging.getLogger(__name__)
+
+
+def show_value(name: str, value: Any) -> None:
+    logger.info("%s: %s", name, value)
+
+
+def show_sys_implementation() -> None:
+    logger.info("sys.implementation:")
+    implementation_name = sys.implementation.name
+    with indent_log():
+        show_value("name", implementation_name)
+
+
+def create_vendor_txt_map() -> Dict[str, str]:
+    with open_text_resource("pip._vendor", "vendor.txt") as f:
+        # Purge non version specifying lines.
+        # Also, remove any space prefix or suffixes (including comments).
+        lines = [
+            line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
+        ]
+
+    # Transform into "module" -> version dict.
+    return dict(line.split("==", 1) for line in lines)
+
+
+def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
+    # Module name can be uppercase in vendor.txt for some reason...
+    module_name = module_name.lower().replace("-", "_")
+    # PATCH: setuptools is actually only pkg_resources.
+    if module_name == "setuptools":
+        module_name = "pkg_resources"
+
+    try:
+        __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
+        return getattr(pip._vendor, module_name)
+    except ImportError:
+        # We allow 'truststore' to fail to import due
+        # to being unavailable on Python 3.9 and earlier.
+        if module_name == "truststore" and sys.version_info < (3, 10):
+            return None
+        raise
+
+
+def get_vendor_version_from_module(module_name: str) -> Optional[str]:
+    module = get_module_from_module_name(module_name)
+    version = getattr(module, "__version__", None)
+
+    if module and not version:
+        # Try to find version in debundled module info.
+        assert module.__file__ is not None
+        env = get_environment([os.path.dirname(module.__file__)])
+        dist = env.get_distribution(module_name)
+        if dist:
+            version = str(dist.version)
+
+    return version
+
+
+def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
+    """Log the actual version and print extra info if there is
+    a conflict or if the actual version could not be imported.
+    """
+    for module_name, expected_version in vendor_txt_versions.items():
+        extra_message = ""
+        actual_version = get_vendor_version_from_module(module_name)
+        if not actual_version:
+            extra_message = (
+                " (Unable to locate actual module version, using"
+                " vendor.txt specified version)"
+            )
+            actual_version = expected_version
+        elif parse_version(actual_version) != parse_version(expected_version):
+            extra_message = (
+                " (CONFLICT: vendor.txt suggests version should"
+                f" be {expected_version})"
+            )
+        logger.info("%s==%s%s", module_name, actual_version, extra_message)
+
+
+def show_vendor_versions() -> None:
+    logger.info("vendored library versions:")
+
+    vendor_txt_versions = create_vendor_txt_map()
+    with indent_log():
+        show_actual_vendor_versions(vendor_txt_versions)
+
+
+def show_tags(options: Values) -> None:
+    tag_limit = 10
+
+    target_python = make_target_python(options)
+    tags = target_python.get_sorted_tags()
+
+    # Display the target options that were explicitly provided.
+    formatted_target = target_python.format_given()
+    suffix = ""
+    if formatted_target:
+        suffix = f" (target: {formatted_target})"
+
+    msg = f"Compatible tags: {len(tags)}{suffix}"
+    logger.info(msg)
+
+    if options.verbose < 1 and len(tags) > tag_limit:
+        tags_limited = True
+        tags = tags[:tag_limit]
+    else:
+        tags_limited = False
+
+    with indent_log():
+        for tag in tags:
+            logger.info(str(tag))
+
+        if tags_limited:
+            msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
+            logger.info(msg)
+
+
+def ca_bundle_info(config: Configuration) -> str:
+    levels = {key.split(".", 1)[0] for key, _ in config.items()}
+    if not levels:
+        return "Not specified"
+
+    levels_that_override_global = ["install", "wheel", "download"]
+    global_overriding_level = [
+        level for level in levels if level in levels_that_override_global
+    ]
+    if not global_overriding_level:
+        return "global"
+
+    if "global" in levels:
+        levels.remove("global")
+    return ", ".join(levels)
+
+
+class DebugCommand(Command):
+    """
+    Display debug information.
+    """
+
+    usage = """
+      %prog <options>"""
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        cmdoptions.add_target_python_options(self.cmd_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+        self.parser.config.load()
+
+    def run(self, options: Values, args: List[str]) -> int:
+        logger.warning(
+            "This command is only meant for debugging. "
+            "Do not use this with automation for parsing and getting these "
+            "details, since the output and options of this command may "
+            "change without notice."
+        )
+        show_value("pip version", get_pip_version())
+        show_value("sys.version", sys.version)
+        show_value("sys.executable", sys.executable)
+        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
+        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
+        show_value(
+            "locale.getpreferredencoding",
+            locale.getpreferredencoding(),
+        )
+        show_value("sys.platform", sys.platform)
+        show_sys_implementation()
+
+        show_value("'cert' config value", ca_bundle_info(self.parser.config))
+        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
+        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
+        show_value("pip._vendor.certifi.where()", where())
+        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
+
+        show_vendor_versions()
+
+        show_tags(options)
+
+        return SUCCESS
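The parsing rule in create_vendor_txt_map above is small enough to verify standalone: keep only lines containing "==", and drop everything after the first space (comments, environment markers). A sketch with invented vendor.txt contents:

# Hypothetical vendor.txt text, for illustration only.
sample = """\
CacheControl==0.14.0
distlib==0.3.8
truststore==0.9.1 ; python_version >= "3.10"
# a comment line without a pin
"""
lines = [line.strip().split(" ", 1)[0] for line in sample.splitlines() if "==" in line]
vendor_map = dict(line.split("==", 1) for line in lines)
assert vendor_map == {"CacheControl": "0.14.0", "distlib": "0.3.8", "truststore": "0.9.1"}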
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/download.py
ADDED
@@ -0,0 +1,146 @@
+import logging
+import os
+from optparse import Values
+from typing import List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import check_legacy_setup_py_options
+from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+    """
+    Download packages from:
+
+    - PyPI (and other indexes) using requirement specifiers.
+    - VCS project urls.
+    - Local project directories.
+    - Local or remote source archives.
+
+    pip also supports downloading from "requirements files", which provide
+    an easy way to specify a whole environment to be downloaded.
+    """
+
+    usage = """
+      %prog [options] <requirement specifier> [package-index-options] ...
+      %prog [options] -r <requirements file> [package-index-options] ...
+      %prog [options] <vcs project url> ...
+      %prog [options] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(cmdoptions.constraints())
+        self.cmd_opts.add_option(cmdoptions.requirements())
+        self.cmd_opts.add_option(cmdoptions.no_deps())
+        self.cmd_opts.add_option(cmdoptions.global_options())
+        self.cmd_opts.add_option(cmdoptions.no_binary())
+        self.cmd_opts.add_option(cmdoptions.only_binary())
+        self.cmd_opts.add_option(cmdoptions.prefer_binary())
+        self.cmd_opts.add_option(cmdoptions.src())
+        self.cmd_opts.add_option(cmdoptions.pre())
+        self.cmd_opts.add_option(cmdoptions.require_hashes())
+        self.cmd_opts.add_option(cmdoptions.progress_bar())
+        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+        self.cmd_opts.add_option(cmdoptions.use_pep517())
+        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
+        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+
+        self.cmd_opts.add_option(
+            "-d",
+            "--dest",
+            "--destination-dir",
+            "--destination-directory",
+            dest="download_dir",
+            metavar="dir",
+            default=os.curdir,
+            help="Download packages into <dir>.",
+        )
+
+        cmdoptions.add_target_python_options(self.cmd_opts)
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    @with_cleanup
+    def run(self, options: Values, args: List[str]) -> int:
+        options.ignore_installed = True
+        # editable doesn't really make sense for `pip download`, but the bowels
+        # of the RequirementSet code require that property.
+        options.editables = []
+
+        cmdoptions.check_dist_restriction(options)
+
+        options.download_dir = normalize_path(options.download_dir)
+        ensure_dir(options.download_dir)
+
+        session = self.get_default_session(options)
+
+        target_python = make_target_python(options)
+        finder = self._build_package_finder(
+            options=options,
+            session=session,
+            target_python=target_python,
+            ignore_requires_python=options.ignore_requires_python,
+        )
+
+        build_tracker = self.enter_context(get_build_tracker())
+
+        directory = TempDirectory(
+            delete=not options.no_clean,
+            kind="download",
+            globally_managed=True,
+        )
+
+        reqs = self.get_requirements(args, options, finder, session)
+        check_legacy_setup_py_options(options, reqs)
+
+        preparer = self.make_requirement_preparer(
+            temp_build_dir=directory,
+            options=options,
+            build_tracker=build_tracker,
+            session=session,
+            finder=finder,
+            download_dir=options.download_dir,
+            use_user_site=False,
+            verbosity=self.verbosity,
+        )
+
+        resolver = self.make_resolver(
+            preparer=preparer,
+            finder=finder,
+            options=options,
+            ignore_requires_python=options.ignore_requires_python,
+            use_pep517=options.use_pep517,
+            py_version_info=options.python_version,
+        )
+
+        self.trace_basic_info(finder)
+
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+        downloaded: List[str] = []
+        for req in requirement_set.requirements.values():
+            if req.satisfied_by is None:
+                assert req.name is not None
+                preparer.save_linked_requirement(req)
+                downloaded.append(req.name)
+
+        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
+
+        if downloaded:
+            write_output("Successfully downloaded %s", " ".join(downloaded))
+
+        return SUCCESS
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/freeze.py
ADDED
@@ -0,0 +1,109 @@
+import sys
+from optparse import Values
+from typing import AbstractSet, List
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+
+def _should_suppress_build_backends() -> bool:
+    return sys.version_info < (3, 12)
+
+
+def _dev_pkgs() -> AbstractSet[str]:
+    pkgs = {"pip"}
+
+    if _should_suppress_build_backends():
+        pkgs |= {"setuptools", "distribute", "wheel"}
+
+    return pkgs
+
+
+class FreezeCommand(Command):
+    """
+    Output installed packages in requirements format.
+
+    packages are listed in a case-insensitive sorted order.
+    """
+
+    ignore_require_venv = True
+    usage = """
+      %prog [options]"""
+    log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
+            default=[],
+            metavar="file",
+            help=(
+                "Use the order in the given requirements file and its "
+                "comments when generating output. This option can be "
+                "used multiple times."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "-l",
+            "--local",
+            dest="local",
+            action="store_true",
+            default=False,
+            help=(
+                "If in a virtualenv that has global access, do not output "
+                "globally-installed packages."
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--user",
+            dest="user",
+            action="store_true",
+            default=False,
+            help="Only output packages installed in user-site.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_path())
+        self.cmd_opts.add_option(
+            "--all",
+            dest="freeze_all",
+            action="store_true",
+            help=(
+                "Do not skip these packages in the output:"
+                " {}".format(", ".join(_dev_pkgs()))
+            ),
+        )
+        self.cmd_opts.add_option(
+            "--exclude-editable",
+            dest="exclude_editable",
+            action="store_true",
+            help="Exclude editable package from output.",
+        )
+        self.cmd_opts.add_option(cmdoptions.list_exclude())
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        skip = set(stdlib_pkgs)
+        if not options.freeze_all:
+            skip.update(_dev_pkgs())
+
+        if options.excludes:
+            skip.update(options.excludes)
+
+        cmdoptions.check_list_path_option(options)
+
+        for line in freeze(
+            requirement=options.requirements,
+            local_only=options.local,
+            user_only=options.user,
+            paths=options.path,
+            isolated=options.isolated_mode,
+            skip=skip,
+            exclude_editable=options.exclude_editable,
+        ):
+            sys.stdout.write(line + "\n")
+        return SUCCESS
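FreezeCommand.run builds its skip set from stdlib_pkgs plus _dev_pkgs unless --all is given. A standalone sketch of that decision; default_skip is a hypothetical helper, and pip-internal stdlib_pkgs is approximated as an empty set for illustration:

import sys

def default_skip(freeze_all: bool, excludes: list) -> set:
    skip = set()  # stands in for pip's stdlib_pkgs
    if not freeze_all:
        skip.add("pip")
        # Mirrors _should_suppress_build_backends() above.
        if sys.version_info < (3, 12):
            skip |= {"setuptools", "distribute", "wheel"}
    skip.update(excludes)
    return skip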
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/hash.py
ADDED
@@ -0,0 +1,59 @@
+import hashlib
+import logging
+import sys
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+    """
+    Compute a hash of a local package archive.
+
+    These can be used with --hash in a requirements file to do repeatable
+    installs.
+    """
+
+    usage = "%prog [options] <file> ..."
+    ignore_require_venv = True
+
+    def add_options(self) -> None:
+        self.cmd_opts.add_option(
+            "-a",
+            "--algorithm",
+            dest="algorithm",
+            choices=STRONG_HASHES,
+            action="store",
+            default=FAVORITE_HASH,
+            help="The hash algorithm to use: one of {}".format(
+                ", ".join(STRONG_HASHES)
+            ),
+        )
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options: Values, args: List[str]) -> int:
+        if not args:
+            self.parser.print_usage(sys.stderr)
+            return ERROR
+
+        algorithm = options.algorithm
+        for path in args:
+            write_output(
+                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
+            )
+        return SUCCESS
+
+
+def _hash_of_file(path: str, algorithm: str) -> str:
+    """Return the hash digest of a file."""
+    with open(path, "rb") as archive:
+        hash = hashlib.new(algorithm)
+        for chunk in read_chunks(archive):
+            hash.update(chunk)
+    return hash.hexdigest()
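For use outside pip, _hash_of_file above reduces to plain hashlib with chunked reads. A minimal sketch; the 8192-byte chunk size is an illustrative choice, not necessarily what pip's read_chunks helper uses:

import hashlib

def hash_of_file(path: str, algorithm: str = "sha256") -> str:
    digest = hashlib.new(algorithm)
    with open(path, "rb") as archive:
        # Read fixed-size chunks so large archives never load fully into
        # memory, mirroring read_chunks in the command above.
        for chunk in iter(lambda: archive.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()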
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/help.py
ADDED
@@ -0,0 +1,41 @@
+from optparse import Values
+from typing import List
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+
+
+class HelpCommand(Command):
+    """Show help for commands"""
+
+    usage = """
+      %prog <command>"""
+    ignore_require_venv = True
+
+    def run(self, options: Values, args: List[str]) -> int:
+        from pip._internal.commands import (
+            commands_dict,
+            create_command,
+            get_similar_commands,
+        )
+
+        try:
+            # 'pip help' with no args is handled by pip.__init__.parseopt()
+            cmd_name = args[0]  # the command we need help for
+        except IndexError:
+            return SUCCESS
+
+        if cmd_name not in commands_dict:
+            guess = get_similar_commands(cmd_name)
+
+            msg = [f'unknown command "{cmd_name}"']
+            if guess:
+                msg.append(f'maybe you meant "{guess}"')
+
+            raise CommandError(" - ".join(msg))
+
+        command = create_command(cmd_name)
+        command.parser.print_help()
+
+        return SUCCESS
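get_similar_commands is pip-internal, but the standard library's difflib produces the same kind of "maybe you meant" suggestion that HelpCommand prints. A sketch using the command names that appear in this diff:

import difflib

commands = ["cache", "config", "debug", "download", "freeze", "hash", "help", "index", "install"]
guess = difflib.get_close_matches("instal", commands, n=1)
assert guess == ["install"]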
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/index.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import logging
from optparse import Values
from typing import Any, Iterable, List, Optional

from pip._vendor.packaging.version import Version

from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.commands.search import print_dist_installation_info
from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.models.target_python import TargetPython
from pip._internal.network.session import PipSession
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class IndexCommand(IndexGroupCommand):
    """
    Inspect information available from package indexes.
    """

    ignore_require_venv = True
    usage = """
        %prog versions <package>
    """

    def add_options(self) -> None:
        cmdoptions.add_target_python_options(self.cmd_opts)

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        handlers = {
            "versions": self.get_available_package_versions,
        }

        logger.warning(
            "pip index is currently an experimental command. "
            "It may be removed/changed in a future release "
            "without prior warning."
        )

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to the index command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )

    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
        if len(args) != 1:
            raise CommandError("You need to specify exactly one argument")

        target_python = cmdoptions.make_target_python(options)
        query = args[0]

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
                ignore_requires_python=options.ignore_requires_python,
            )

            versions: Iterable[Version] = (
                candidate.version for candidate in finder.find_all_candidates(query)
            )

            if not options.pre:
                # Remove prereleases
                versions = (
                    version for version in versions if not version.is_prerelease
                )
            versions = set(versions)

            if not versions:
                raise DistributionNotFound(
                    f"No matching distribution found for {query}"
                )

            formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
            latest = formatted_versions[0]

        write_output(f"{query} ({latest})")
        write_output("Available versions: {}".format(", ".join(formatted_versions)))
        print_dist_installation_info(query, latest)
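The filtering and sorting in get_available_package_versions can be exercised on its own: candidate versions are compared with the vendored packaging Version type, and prereleases are dropped unless --pre was given. A minimal sketch of that behaviour; the sample version list is invented:

from pip._vendor.packaging.version import Version

raw = ["1.0", "2.0b1", "1.5", "2.0"]
versions = [Version(v) for v in raw]

# Mirror the command's behaviour without --pre: drop prereleases first.
stable = {v for v in versions if not v.is_prerelease}

# Newest first, like the "Available versions:" line above.
formatted = [str(v) for v in sorted(stable, reverse=True)]
print(formatted)     # ['2.0', '1.5', '1.0']
print(formatted[0])  # latest -> '2.0'
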
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/install.py
ADDED
|
@@ -0,0 +1,783 @@
import errno
import json
import operator
import os
import shutil
import site
from optparse import SUPPRESS_HELP, Values
from typing import List, Optional

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.rich import print_json

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import (
    RequirementCommand,
    with_cleanup,
)
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, InstallationError
from pip._internal.locations import get_scheme
from pip._internal.metadata import get_environment
from pip._internal.models.installation_report import InstallationReport
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
from pip._internal.req import install_given_reqs
from pip._internal.req.req_install import (
    InstallRequirement,
    check_legacy_setup_py_options,
)
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.filesystem import test_writable_dir
from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import (
    check_externally_managed,
    ensure_dir,
    get_pip_version,
    protect_pip_from_modification_on_windows,
    warn_if_run_as_root,
    write_output,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.virtualenv import (
    running_under_virtualenv,
    virtualenv_no_global,
)
from pip._internal.wheel_builder import build, should_build_for_install_command

logger = getLogger(__name__)


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.pre())

        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(
            "--dry-run",
            action="store_true",
            dest="dry_run",
            default=False,
            help=(
                "Don't actually install anything, just print what would be "
                "installed. Can be used in combination with --ignore-installed "
                "to 'resolve' the requirements."
            ),
        )
        self.cmd_opts.add_option(
            "-t",
            "--target",
            dest="target_dir",
            metavar="dir",
            default=None,
            help=(
                "Install packages into <dir>. "
                "By default this will not replace existing files/folders in "
                "<dir>. Use --upgrade to replace existing packages in <dir> "
                "with new versions."
            ),
        )
        cmdoptions.add_target_python_options(self.cmd_opts)

        self.cmd_opts.add_option(
            "--user",
            dest="use_user_site",
            action="store_true",
            help=(
                "Install to the Python user install directory for your "
                "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                "Windows. (See the Python documentation for site.USER_BASE "
                "for full details.)"
            ),
        )
        self.cmd_opts.add_option(
            "--no-user",
            dest="use_user_site",
            action="store_false",
            help=SUPPRESS_HELP,
        )
        self.cmd_opts.add_option(
            "--root",
            dest="root_path",
            metavar="dir",
            default=None,
            help="Install everything relative to this alternate root directory.",
        )
        self.cmd_opts.add_option(
            "--prefix",
            dest="prefix_path",
            metavar="dir",
            default=None,
            help=(
                "Installation prefix where lib, bin and other top-level "
                "folders are placed. Note that the resulting installation may "
                "contain scripts and other resources which reference the "
                "Python interpreter of pip, and not that of ``--prefix``. "
                "See also the ``--python`` option if the intention is to "
                "install packages into another (possibly pip-free) "
                "environment."
            ),
        )

        self.cmd_opts.add_option(cmdoptions.src())

        self.cmd_opts.add_option(
            "-U",
            "--upgrade",
            dest="upgrade",
            action="store_true",
            help=(
                "Upgrade all specified packages to the newest available "
                "version. The handling of dependencies depends on the "
                "upgrade-strategy used."
            ),
        )

        self.cmd_opts.add_option(
            "--upgrade-strategy",
            dest="upgrade_strategy",
            default="only-if-needed",
            choices=["only-if-needed", "eager"],
            help=(
                "Determines how dependency upgrading should be handled "
                "[default: %default]. "
                '"eager" - dependencies are upgraded regardless of '
                "whether the currently installed version satisfies the "
                "requirements of the upgraded package(s). "
                '"only-if-needed" - dependencies are upgraded only when they '
                "do not satisfy the requirements of the upgraded package(s)."
            ),
        )

        self.cmd_opts.add_option(
            "--force-reinstall",
            dest="force_reinstall",
            action="store_true",
            help="Reinstall all packages even if they are already up-to-date.",
        )

        self.cmd_opts.add_option(
            "-I",
            "--ignore-installed",
            dest="ignore_installed",
            action="store_true",
            help=(
                "Ignore the installed packages, overwriting them. "
                "This can break your system if the existing package "
                "is of a different version or was installed "
                "with a different package manager!"
            ),
        )

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())

        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.global_options())

        self.cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        self.cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.root_user_action())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

        self.cmd_opts.add_option(
            "--report",
            dest="json_report_file",
            metavar="file",
            default=None,
            help=(
                "Generate a JSON file describing what pip did to install "
                "the provided requirements. "
                "Can be used in combination with --dry-run and --ignore-installed "
                "to 'resolve' the requirements. "
                "When - is used as file name it writes to stdout. "
                "When writing to stdout, please combine with the --quiet option "
                "to avoid mixing pip logging output with JSON output."
            ),
        )

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        if options.use_user_site and options.target_dir is not None:
            raise CommandError("Can not combine '--user' and '--target'")

        # Check whether the environment we're installing into is externally
        # managed, as specified in PEP 668. Specifying --root, --target, or
        # --prefix disables the check, since there's no reliable way to locate
        # the EXTERNALLY-MANAGED file for those cases. An exception is also
        # made specifically for "--dry-run --report" for convenience.
        installing_into_current_environment = (
            not (options.dry_run and options.json_report_file)
            and options.root_path is None
            and options.target_dir is None
            and options.prefix_path is None
        )
        if (
            installing_into_current_environment
            and not options.override_externally_managed
        ):
            check_externally_managed()

        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        cmdoptions.check_dist_restriction(options, check_target=True)

        logger.verbose("Using %s", get_pip_version())
        options.use_user_site = decide_user_install(
            options.use_user_site,
            prefix_path=options.prefix_path,
            target_dir=options.target_dir,
            root_path=options.root_path,
            isolated_mode=options.isolated_mode,
        )

        target_temp_dir: Optional[TempDirectory] = None
        target_temp_dir_path: Optional[str] = None
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (
                # fmt: off
                os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)
                # fmt: on
            ):
                raise CommandError(
                    "Target path exists but is not a directory, will not continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir = TempDirectory(kind="target")
            target_temp_dir_path = target_temp_dir.path
            self.enter_context(target_temp_dir)

        global_options = options.global_options or []

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )
        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="install",
            globally_managed=True,
        )

        try:
            reqs = self.get_requirements(args, options, finder, session)
            check_legacy_setup_py_options(options, reqs)

            wheel_cache = WheelCache(options.cache_dir)

            # Only when installing is it permitted to use PEP 660.
            # In other circumstances (pip wheel, pip download) we generate
            # regular (i.e. non editable) metadata and wheels.
            for req in reqs:
                req.permit_editable_wheels = True

            preparer = self.make_requirement_preparer(
                temp_build_dir=directory,
                options=options,
                build_tracker=build_tracker,
                session=session,
                finder=finder,
                use_user_site=options.use_user_site,
                verbosity=self.verbosity,
            )
            resolver = self.make_resolver(
                preparer=preparer,
                finder=finder,
                options=options,
                wheel_cache=wheel_cache,
                use_user_site=options.use_user_site,
                ignore_installed=options.ignore_installed,
                ignore_requires_python=options.ignore_requires_python,
                force_reinstall=options.force_reinstall,
                upgrade_strategy=upgrade_strategy,
                use_pep517=options.use_pep517,
                py_version_info=options.python_version,
            )

            self.trace_basic_info(finder)

            requirement_set = resolver.resolve(
                reqs, check_supported_wheels=not options.target_dir
            )

            if options.json_report_file:
                report = InstallationReport(requirement_set.requirements_to_install)
                if options.json_report_file == "-":
                    print_json(data=report.to_dict())
                else:
                    with open(options.json_report_file, "w", encoding="utf-8") as f:
                        json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)

            if options.dry_run:
                would_install_items = sorted(
                    (r.metadata["name"], r.metadata["version"])
                    for r in requirement_set.requirements_to_install
                )
                if would_install_items:
                    write_output(
                        "Would install %s",
                        " ".join("-".join(item) for item in would_install_items),
                    )
                return SUCCESS

            try:
                pip_req = requirement_set.get_requirement("pip")
            except KeyError:
                modifying_pip = False
            else:
                # If we're not replacing an already installed pip,
                # we're not modifying it.
                modifying_pip = pip_req.satisfied_by is None
                if modifying_pip:
                    # Eagerly import this module to avoid crashes. Otherwise, this
                    # module would be imported *after* pip was replaced, resulting in
                    # crashes if the new self_outdated_check module was incompatible
                    # with the rest of pip that's already imported.
                    import pip._internal.self_outdated_check  # noqa: F401
            protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)

            reqs_to_build = [
                r
                for r in requirement_set.requirements.values()
                if should_build_for_install_command(r)
            ]

            _, build_failures = build(
                reqs_to_build,
                wheel_cache=wheel_cache,
                verify=True,
                build_options=[],
                global_options=global_options,
            )

            if build_failures:
                raise InstallationError(
                    "ERROR: Failed to build installable wheels for some "
                    "pyproject.toml based projects ({})".format(
                        ", ".join(r.name for r in build_failures)  # type: ignore
                    )
                )

            to_install = resolver.get_installation_order(requirement_set)

            # Check for conflicts in the package set we're installing.
            conflicts: Optional[ConflictDetails] = None
            should_warn_about_conflicts = (
                not options.ignore_dependencies and options.warn_about_conflicts
            )
            if should_warn_about_conflicts:
                conflicts = self._determine_conflicts(to_install)

            # Don't warn about script install locations if
            # --target or --prefix has been specified
            warn_script_location = options.warn_script_location
            if options.target_dir or options.prefix_path:
                warn_script_location = False

            installed = install_given_reqs(
                to_install,
                global_options,
                root=options.root_path,
                home=target_temp_dir_path,
                prefix=options.prefix_path,
                warn_script_location=warn_script_location,
                use_user_site=options.use_user_site,
                pycompile=options.compile,
            )

            lib_locations = get_lib_location_guesses(
                user=options.use_user_site,
                home=target_temp_dir_path,
                root=options.root_path,
                prefix=options.prefix_path,
                isolated=options.isolated_mode,
            )
            env = get_environment(lib_locations)

            # Display a summary of installed packages, with extra care to
            # display a package name as it was requested by the user.
            installed.sort(key=operator.attrgetter("name"))
            summary = []
            installed_versions = {}
            for distribution in env.iter_all_distributions():
                installed_versions[distribution.canonical_name] = distribution.version
            for package in installed:
                display_name = package.name
                version = installed_versions.get(canonicalize_name(display_name), None)
                if version:
                    text = f"{display_name}-{version}"
                else:
                    text = display_name
                summary.append(text)

            if conflicts is not None:
                self._warn_about_conflicts(
                    conflicts,
                    resolver_variant=self.determine_resolver_variant(options),
                )

            installed_desc = " ".join(summary)
            if installed_desc:
                write_output(
                    "Successfully installed %s",
                    installed_desc,
                )
        except OSError as error:
            show_traceback = self.verbosity >= 1

            message = create_os_error_message(
                error,
                show_traceback,
                options.use_user_site,
            )
            logger.error(message, exc_info=show_traceback)

            return ERROR

        if options.target_dir:
            assert target_temp_dir
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS

    def _handle_target_dir(
        self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
    ) -> None:
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        scheme = get_scheme("", home=target_temp_dir.path)
        purelib_dir = scheme.purelib
        platlib_dir = scheme.platlib
        data_dir = scheme.data

        if os.path.exists(purelib_dir):
            lib_dir_list.append(purelib_dir)
        if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
            lib_dir_list.append(platlib_dir)
        if os.path.exists(data_dir):
            lib_dir_list.append(data_dir)

        for lib_dir in lib_dir_list:
            for item in os.listdir(lib_dir):
                if lib_dir == data_dir:
                    ddir = os.path.join(data_dir, item)
                    if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                        continue
                target_item_dir = os.path.join(target_dir, item)
                if os.path.exists(target_item_dir):
                    if not upgrade:
                        logger.warning(
                            "Target directory %s already exists. Specify "
                            "--upgrade to force replacement.",
                            target_item_dir,
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            "Target directory %s already exists and is "
                            "a link. pip will not automatically replace "
                            "links, please remove if replacement is "
                            "desired.",
                            target_item_dir,
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(os.path.join(lib_dir, item), target_item_dir)

    def _determine_conflicts(
        self, to_install: List[InstallRequirement]
    ) -> Optional[ConflictDetails]:
        try:
            return check_install_conflicts(to_install)
        except Exception:
            logger.exception(
                "Error while checking for conflicts. Please file an issue on "
                "pip's issue tracker: https://github.com/pypa/pip/issues/new"
            )
            return None

    def _warn_about_conflicts(
        self, conflict_details: ConflictDetails, resolver_variant: str
    ) -> None:
        package_set, (missing, conflicting) = conflict_details
        if not missing and not conflicting:
            return

        parts: List[str] = []
        if resolver_variant == "legacy":
            parts.append(
                "pip's legacy dependency resolver does not consider dependency "
                "conflicts when selecting packages. This behaviour is the "
                "source of the following dependency conflicts."
            )
        else:
            assert resolver_variant == "resolvelib"
            parts.append(
                "pip's dependency resolver does not currently take into account "
                "all the packages that are installed. This behaviour is the "
                "source of the following dependency conflicts."
            )

        # NOTE: There is some duplication here, with commands/check.py
        for project_name in missing:
            version = package_set[project_name][0]
            for dependency in missing[project_name]:
                message = (
                    f"{project_name} {version} requires {dependency[1]}, "
                    "which is not installed."
                )
                parts.append(message)

        for project_name in conflicting:
            version = package_set[project_name][0]
            for dep_name, dep_version, req in conflicting[project_name]:
                message = (
                    "{name} {version} requires {requirement}, but {you} have "
                    "{dep_name} {dep_version} which is incompatible."
                ).format(
                    name=project_name,
                    version=version,
                    requirement=req,
                    dep_name=dep_name,
                    dep_version=dep_version,
                    you=("you" if resolver_variant == "resolvelib" else "you'll"),
                )
                parts.append(message)

        logger.critical("\n".join(parts))


def get_lib_location_guesses(
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> List[str]:
    scheme = get_scheme(
        "",
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )
    return [scheme.purelib, scheme.platlib]


def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
    return all(
        test_writable_dir(d)
        for d in set(get_lib_location_guesses(root=root, isolated=isolated))
    )


def decide_user_install(
    use_user_site: Optional[bool],
    prefix_path: Optional[str] = None,
    target_dir: Optional[str] = None,
    root_path: Optional[str] = None,
    isolated_mode: bool = False,
) -> bool:
    """Determine whether to do a user install based on the input options.

    If use_user_site is False, no additional checks are done.
    If use_user_site is True, it is checked for compatibility with other
    options.
    If use_user_site is None, the default behaviour depends on the environment,
    which is provided by the other arguments.
    """
    # In some cases (config from tox), use_user_site can be set to an integer
    # rather than a bool, which 'use_user_site is False' wouldn't catch.
    if (use_user_site is not None) and (not use_user_site):
        logger.debug("Non-user install by explicit request")
        return False

    if use_user_site:
        if prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        logger.debug("User install by explicit request")
        return True

    # If we are here, user installs have not been explicitly requested/avoided
    assert use_user_site is None

    # user install incompatible with --prefix/--target
    if prefix_path or target_dir:
        logger.debug("Non-user install due to --prefix or --target option")
        return False

    # If user installs are not enabled, choose a non-user install
    if not site.ENABLE_USER_SITE:
        logger.debug("Non-user install because user site-packages disabled")
        return False

    # If we have permission for a non-user install, do that,
    # otherwise do a user install.
    if site_packages_writable(root=root_path, isolated=isolated_mode):
        logger.debug("Non-user install because site-packages writeable")
        return False

    logger.info(
        "Defaulting to user installation because normal site-packages "
        "is not writeable"
    )
    return True


def create_os_error_message(
    error: OSError, show_traceback: bool, using_user_site: bool
) -> str:
    """Format an error message for an OSError

    It may occur anytime during the execution of the install command.
    """
    parts = []

    # Mention the error if we are not going to show a traceback
    parts.append("Could not install packages due to an OSError")
    if not show_traceback:
        parts.append(": ")
        parts.append(str(error))
    else:
        parts.append(".")

    # Split the error indication from a helper message (if any)
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"

        if not running_under_virtualenv() and not using_user_site:
            parts.extend(
                [
                    user_option_part,
                    " or ",
                    permissions_part.lower(),
                ]
            )
        else:
            parts.append(permissions_part)
        parts.append(".\n")

    # Suggest the user to enable Long Paths if path length is
    # more than 260
    if (
        WINDOWS
        and error.errno == errno.ENOENT
        and error.filename
        and len(error.filename) > 260
    ):
        parts.append(
            "HINT: This error might have occurred since "
            "this system does not have Windows Long Path "
            "support enabled. You can find information on "
            "how to enable this at "
            "https://pip.pypa.io/warnings/enable-long-paths\n"
        )

    return "".join(parts).strip() + "\n"
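Because --report - writes the installation report as JSON on stdout, the resolve-only flow (--dry-run --ignore-installed --report -) can be scripted. A sketch of one way to consume it; it assumes pip is invocable as python -m pip and that the report's top-level "install" list carries per-item "metadata" with "name" and "version" (the shape of pip's installation-report format, but verify the field names against your pip version). It also needs network access to resolve "requests":

import json
import subprocess
import sys

# Ask pip to resolve (not install) and emit the report on stdout.
# --quiet keeps log lines out of the JSON stream, as the option help advises.
proc = subprocess.run(
    [sys.executable, "-m", "pip", "install", "--quiet",
     "--dry-run", "--ignore-installed", "--report", "-", "requests"],
    check=True,
    capture_output=True,
    text=True,
)

report = json.loads(proc.stdout)
for item in report.get("install", []):
    meta = item["metadata"]  # assumed shape: {"name": ..., "version": ...}
    print(f"{meta['name']}=={meta['version']}")
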
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/search.py
ADDED
|
@@ -0,0 +1,172 @@
import logging
import shutil
import sys
import textwrap
import xmlrpc.client
from collections import OrderedDict
from optparse import Values
from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict

from pip._vendor.packaging.version import parse as parse_version

from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.metadata import get_default_environment
from pip._internal.models.index import PyPI
from pip._internal.network.xmlrpc import PipXmlrpcTransport
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import write_output

if TYPE_CHECKING:

    class TransformedHit(TypedDict):
        name: str
        summary: str
        versions: List[str]


logger = logging.getLogger(__name__)


class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-i",
            "--index",
            dest="index",
            metavar="URL",
            default=PyPI.pypi_url,
            help="Base URL of Python Package Index (default %default)",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
            raise CommandError("Missing required argument (search query).")
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = shutil.get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
        index_url = options.index

        session = self.get_default_session(options)

        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc.client.ServerProxy(index_url, transport)
        try:
            hits = pypi.search({"name": query, "summary": query}, "or")
        except xmlrpc.client.Fault as fault:
            message = (
                f"XMLRPC request failed [code: {fault.faultCode}]\n{fault.faultString}"
            )
            raise CommandError(message)
        assert isinstance(hits, list)
        return hits


def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages: Dict[str, TransformedHit] = OrderedDict()
    for hit in hits:
        name = hit["name"]
        summary = hit["summary"]
        version = hit["version"]

        if name not in packages.keys():
            packages[name] = {
                "name": name,
                "summary": summary,
                "versions": [version],
            }
        else:
            packages[name]["versions"].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]["versions"]):
                packages[name]["summary"] = summary

    return list(packages.values())


def print_dist_installation_info(name: str, latest: str) -> None:
    env = get_default_environment()
    dist = env.get_distribution(name)
    if dist is not None:
        with indent_log():
            if dist.version == latest:
                write_output("INSTALLED: %s (latest)", dist.version)
            else:
                write_output("INSTALLED: %s", dist.version)
                if parse_version(latest).pre:
                    write_output(
                        "LATEST:    %s (pre-release; install"
                        " with `pip install --pre`)",
                        latest,
                    )
                else:
                    write_output("LATEST:    %s", latest)


def print_results(
    hits: List["TransformedHit"],
    name_column_width: Optional[int] = None,
    terminal_width: Optional[int] = None,
) -> None:
    if not hits:
        return
    if name_column_width is None:
        name_column_width = (
            max(
                [
                    len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
                    for hit in hits
                ]
            )
            + 4
        )

    for hit in hits:
        name = hit["name"]
        summary = hit["summary"] or ""
        latest = highest_version(hit.get("versions", ["-"]))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary_lines = textwrap.wrap(summary, target_width)
                summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)

        name_latest = f"{name} ({latest})"
        line = f"{name_latest:{name_column_width}} - {summary}"
        try:
            write_output(line)
            print_dist_installation_info(name, latest)
        except UnicodeEncodeError:
            pass


def highest_version(versions: List[str]) -> str:
    return max(versions, key=parse_version)
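transform_hits and highest_version are pure functions, so their grouping behaviour is easy to check in isolation with hand-made hits (the sample data below is invented):

from pip._internal.commands.search import highest_version, transform_hits

hits = [
    {"name": "demo", "summary": "old summary", "version": "1.0"},
    {"name": "demo", "summary": "new summary", "version": "2.0"},
    {"name": "other", "summary": "something else", "version": "0.3"},
]

packages = transform_hits(hits)
# One entry per package, versions folded inline; the summary tracks the
# highest version seen, so "new summary" wins for demo.
assert packages[0] == {
    "name": "demo",
    "summary": "new summary",
    "versions": ["1.0", "2.0"],
}
assert highest_version(packages[0]["versions"]) == "2.0"
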
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/commands/show.py
ADDED
|
@@ -0,0 +1,217 @@
import logging
from optparse import Values
from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional

from pip._vendor.packaging.requirements import InvalidRequirement
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.metadata import BaseDistribution, get_default_environment
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-f",
            "--files",
            dest="files",
            action="store_true",
            default=False,
            help="Show the full list of installed files for each package.",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
            logger.warning("ERROR: Please provide a package name or names.")
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
            results, list_files=options.files, verbose=options.verbose
        ):
            return ERROR
        return SUCCESS


class _PackageInfo(NamedTuple):
    name: str
    version: str
    location: str
    editable_project_location: Optional[str]
    requires: List[str]
    required_by: List[str]
    installer: str
    metadata_version: str
    classifiers: List[str]
    summary: str
    homepage: str
    project_urls: List[str]
    author: str
    author_email: str
    license: str
    entry_points: List[str]
    files: Optional[List[str]]


def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    env = get_default_environment()

    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
    query_names = [canonicalize_name(name) for name in query]
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
        logger.warning("Package(s) not found: %s", ", ".join(missing))

    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
        return (
            dist.metadata["Name"] or "UNKNOWN"
            for dist in installed.values()
            if current_dist.canonical_name
            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
        )

    for query_name in query_names:
        try:
            dist = installed[query_name]
        except KeyError:
            continue

        try:
            requires = sorted(
                # Avoid duplicates in requirements (e.g. due to environment markers).
                {req.name for req in dist.iter_dependencies()},
                key=str.lower,
            )
        except InvalidRequirement:
            requires = sorted(dist.iter_raw_dependencies(), key=str.lower)

        try:
            required_by = sorted(_get_requiring_packages(dist), key=str.lower)
        except InvalidRequirement:
            required_by = ["#N/A"]

        try:
            entry_points_text = dist.read_text("entry_points.txt")
            entry_points = entry_points_text.splitlines(keepends=False)
        except FileNotFoundError:
            entry_points = []

        files_iter = dist.iter_declared_entries()
        if files_iter is None:
            files: Optional[List[str]] = None
        else:
            files = sorted(files_iter)

        metadata = dist.metadata

        project_urls = metadata.get_all("Project-URL", [])
        homepage = metadata.get("Home-page", "")
        if not homepage:
            # It's common that there is a "homepage" Project-URL, but Home-page
            # remains unset (especially as PEP 621 doesn't surface the field).
            #
            # This logic was taken from PyPI's codebase.
            for url in project_urls:
                url_label, url = url.split(",", maxsplit=1)
                normalized_label = (
                    url_label.casefold().replace("-", "").replace("_", "").strip()
                )
                if normalized_label == "homepage":
                    homepage = url.strip()
                    break

        yield _PackageInfo(
            name=dist.raw_name,
            version=dist.raw_version,
            location=dist.location or "",
            editable_project_location=dist.editable_project_location,
            requires=requires,
            required_by=required_by,
            installer=dist.installer,
            metadata_version=dist.metadata_version or "",
            classifiers=metadata.get_all("Classifier", []),
            summary=metadata.get("Summary", ""),
            homepage=homepage,
            project_urls=project_urls,
            author=metadata.get("Author", ""),
            author_email=metadata.get("Author-email", ""),
            license=metadata.get("License", ""),
            entry_points=entry_points,
            files=files,
        )


def print_results(
    distributions: Iterable[_PackageInfo],
    list_files: bool,
    verbose: bool,
) -> bool:
    """
    Print the information from installed distributions found.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            write_output("---")

        write_output("Name: %s", dist.name)
        write_output("Version: %s", dist.version)
        write_output("Summary: %s", dist.summary)
        write_output("Home-page: %s", dist.homepage)
        write_output("Author: %s", dist.author)
        write_output("Author-email: %s", dist.author_email)
        write_output("License: %s", dist.license)
        write_output("Location: %s", dist.location)
        if dist.editable_project_location is not None:
            write_output(
                "Editable project location: %s", dist.editable_project_location
            )
        write_output("Requires: %s", ", ".join(dist.requires))
        write_output("Required-by: %s", ", ".join(dist.required_by))

        if verbose:
            write_output("Metadata-Version: %s", dist.metadata_version)
            write_output("Installer: %s", dist.installer)
            write_output("Classifiers:")
            for classifier in dist.classifiers:
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.entry_points:
                write_output("  %s", entry.strip())
            write_output("Project-URLs:")
            for project_url in dist.project_urls:
                write_output("  %s", project_url)
        if list_files:
            write_output("Files:")
            if dist.files is None:
                write_output("Cannot locate RECORD or installed-files.txt")
            else:
                for line in dist.files:
                    write_output("  %s", line.strip())
    return results_printed
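Since print_results emits RFC-822-style "Header: value" lines, the output of pip show can be parsed with the standard library's email parser. A sketch, assuming pip is invocable as python -m pip; which fields are populated varies with each package's metadata:

import subprocess
import sys
from email.parser import Parser

proc = subprocess.run(
    [sys.executable, "-m", "pip", "show", "pip"],
    check=True,
    capture_output=True,
    text=True,
)

# The header-style output maps cleanly onto an email.message.Message,
# so fields can be read by name.
info = Parser().parsestr(proc.stdout)
print(info["Name"], info["Version"])
print("Requires:", info["Requires"])
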
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc
ADDED
Binary file (2.58 kB)

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc
ADDED
Binary file (7.85 kB)

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc
ADDED
Binary file (4.71 kB)

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc
ADDED
Binary file (3.58 kB)

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc
ADDED
Binary file (8.11 kB)

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc
ADDED
Binary file (37.8 kB)

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc
ADDED
Binary file (15.5 kB)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/glibc.py
ADDED
@@ -0,0 +1,101 @@
import os
import sys
from typing import Optional, Tuple


def glibc_version_string() -> Optional[str]:
    "Returns glibc version string, or None if not using glibc."
    return glibc_version_string_confstr() or glibc_version_string_ctypes()


def glibc_version_string_confstr() -> Optional[str]:
    "Primary implementation of glibc_version_string using os.confstr."
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module:
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
    if sys.platform == "win32":
        return None
    try:
        gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
        if gnu_libc_version is None:
            return None
        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
        _, version = gnu_libc_version.split()
    except (AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version


def glibc_version_string_ctypes() -> Optional[str]:
    "Fallback implementation of glibc_version_string using ctypes."

    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can't proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.7')
#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.9')
#
# But the truth is:
#
#   ~$ ldd --version
#   ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver() -> Tuple[str, str]:
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    glibc_version = glibc_version_string()
    if glibc_version is None:
        return ("", "")
    else:
        return ("glibc", glibc_version)
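A quick usage sketch (illustrative only, not part of the uploaded diff): on a glibc-based Linux the confstr strategy usually answers; on musl or Windows both strategies return None and libc_ver() falls back to empty strings.

    from pip._internal.utils.glibc import glibc_version_string, libc_ver

    print(glibc_version_string())  # e.g. "2.35" on glibc, None elsewhere
    print(libc_ver())              # e.g. ("glibc", "2.35") or ("", "")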
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/subprocess.py
ADDED
@@ -0,0 +1,245 @@
import logging
import os
import shlex
import subprocess
from typing import Any, Callable, Iterable, List, Literal, Mapping, Optional, Union

from pip._vendor.rich.markup import escape

from pip._internal.cli.spinners import SpinnerInterface, open_spinner
from pip._internal.exceptions import InstallationSubprocessError
from pip._internal.utils.logging import VERBOSE, subprocess_logger
from pip._internal.utils.misc import HiddenText

CommandArgs = List[Union[str, HiddenText]]


def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:
    """
    Create a CommandArgs object.
    """
    command_args: CommandArgs = []
    for arg in args:
        # Check for list instead of CommandArgs since CommandArgs is
        # only known during type-checking.
        if isinstance(arg, list):
            command_args.extend(arg)
        else:
            # Otherwise, arg is str or HiddenText.
            command_args.append(arg)

    return command_args


def format_command_args(args: Union[List[str], CommandArgs]) -> str:
    """
    Format command arguments for display.
    """
    # For HiddenText arguments, display the redacted form by calling str().
    # Also, we don't apply str() to arguments that aren't HiddenText since
    # this can trigger a UnicodeDecodeError in Python 2 if the argument
    # has type unicode and includes a non-ascii character. (The type
    # checker doesn't ensure the annotations are correct in all cases.)
    return " ".join(
        shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)
        for arg in args
    )


def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:
    """
    Return the arguments in their raw, unredacted form.
    """
    return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]


def call_subprocess(
    cmd: Union[List[str], CommandArgs],
    show_stdout: bool = False,
    cwd: Optional[str] = None,
    on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
    extra_ok_returncodes: Optional[Iterable[int]] = None,
    extra_environ: Optional[Mapping[str, Any]] = None,
    unset_environ: Optional[Iterable[str]] = None,
    spinner: Optional[SpinnerInterface] = None,
    log_failed_cmd: Optional[bool] = True,
    stdout_only: Optional[bool] = False,
    *,
    command_desc: str,
) -> str:
    """
    Args:
      show_stdout: if true, use INFO to log the subprocess's stderr and
        stdout streams. Otherwise, use DEBUG. Defaults to False.
      extra_ok_returncodes: an iterable of integer return codes that are
        acceptable, in addition to 0. Defaults to None, which means [].
      unset_environ: an iterable of environment variable names to unset
        prior to calling subprocess.Popen().
      log_failed_cmd: if false, failed commands are not logged, only raised.
      stdout_only: if true, return only stdout, else return both. When true,
        logging of both stdout and stderr occurs when the subprocess has
        terminated, else logging occurs as subprocess output is produced.
    """
    if extra_ok_returncodes is None:
        extra_ok_returncodes = []
    if unset_environ is None:
        unset_environ = []
    # Most places in pip use show_stdout=False. What this means is--
    #
    # - We connect the child's output (combined stderr and stdout) to a
    #   single pipe, which we read.
    # - We log this output to stderr at DEBUG level as it is received.
    # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
    #   requested), then we show a spinner so the user can still see the
    #   subprocess is in progress.
    # - If the subprocess exits with an error, we log the output to stderr
    #   at ERROR level if it hasn't already been displayed to the console
    #   (e.g. if --verbose logging wasn't enabled). This way we don't log
    #   the output to the console twice.
    #
    # If show_stdout=True, then the above is still done, but with DEBUG
    # replaced by INFO.
    if show_stdout:
        # Then log the subprocess output at INFO level.
        log_subprocess: Callable[..., None] = subprocess_logger.info
        used_level = logging.INFO
    else:
        # Then log the subprocess output using VERBOSE. This also ensures
        # it will be logged to the log file (aka user_log), if enabled.
        log_subprocess = subprocess_logger.verbose
        used_level = VERBOSE

    # Whether the subprocess will be visible in the console.
    showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level

    # Only use the spinner if we're not showing the subprocess output
    # and we have a spinner.
    use_spinner = not showing_subprocess and spinner is not None

    log_subprocess("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            # Convert HiddenText objects to the underlying str.
            reveal_command_args(cmd),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,
            cwd=cwd,
            env=env,
            errors="backslashreplace",
        )
    except Exception as exc:
        if log_failed_cmd:
            subprocess_logger.critical(
                "Error %s while executing command %s",
                exc,
                command_desc,
            )
        raise
    all_output = []
    if not stdout_only:
        assert proc.stdout
        assert proc.stdin
        proc.stdin.close()
        # In this mode, stdout and stderr are in the same pipe.
        while True:
            line: str = proc.stdout.readline()
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + "\n")

            # Show the line immediately.
            log_subprocess(line)
            # Update the spinner.
            if use_spinner:
                assert spinner
                spinner.spin()
        try:
            proc.wait()
        finally:
            if proc.stdout:
                proc.stdout.close()
        output = "".join(all_output)
    else:
        # In this mode, stdout and stderr are in different pipes.
        # We must use communicate() which is the only safe way to read both.
        out, err = proc.communicate()
        # log line by line to preserve pip log indenting
        for out_line in out.splitlines():
            log_subprocess(out_line)
        all_output.append(out)
        for err_line in err.splitlines():
            log_subprocess(err_line)
        all_output.append(err)
        output = out

    proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes
    if use_spinner:
        assert spinner
        if proc_had_error:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc_had_error:
        if on_returncode == "raise":
            error = InstallationSubprocessError(
                command_description=command_desc,
                exit_code=proc.returncode,
                output_lines=all_output if not showing_subprocess else None,
            )
            if log_failed_cmd:
                subprocess_logger.error("%s", error, extra={"rich": True})
                subprocess_logger.verbose(
                    "[bold magenta]full command[/]: [blue]%s[/]",
                    escape(format_command_args(cmd)),
                    extra={"markup": True},
                )
                subprocess_logger.verbose(
                    "[bold magenta]cwd[/]: %s",
                    escape(cwd or "[inherit]"),
                    extra={"markup": True},
                )

            raise error
        elif on_returncode == "warn":
            subprocess_logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc,
                proc.returncode,
                cwd,
            )
        elif on_returncode == "ignore":
            pass
        else:
            raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")
    return output


def runner_with_spinner_message(message: str) -> Callable[..., None]:
    """Provide a subprocess_runner that shows a spinner message.

    Intended for use with for BuildBackendHookCaller. Thus, the runner has
    an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.
    """

    def runner(
        cmd: List[str],
        cwd: Optional[str] = None,
        extra_environ: Optional[Mapping[str, Any]] = None,
    ) -> None:
        with open_spinner(message) as spinner:
            call_subprocess(
                cmd,
                command_desc=message,
                cwd=cwd,
                extra_environ=extra_environ,
                spinner=spinner,
            )

    return runner
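A usage sketch (illustrative only, not part of the uploaded diff): command_desc is keyword-only, and the default mode merges the child's stdout and stderr into one pipe whose contents are returned.

    import sys

    from pip._internal.utils.subprocess import call_subprocess, make_command

    # HiddenText arguments would be redacted in logs; plain strings pass through.
    cmd = make_command(sys.executable, "-c", "print('hello from a child process')")
    output = call_subprocess(cmd, command_desc="python -c print(...)")
    print(output)  # combined stdout+stderr captured by the default mode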
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py
ADDED
@@ -0,0 +1,296 @@
import errno
import itertools
import logging
import os.path
import tempfile
import traceback
from contextlib import ExitStack, contextmanager
from pathlib import Path
from typing import (
    Any,
    Callable,
    Dict,
    Generator,
    List,
    Optional,
    TypeVar,
    Union,
)

from pip._internal.utils.misc import enum, rmtree

logger = logging.getLogger(__name__)

_T = TypeVar("_T", bound="TempDirectory")


# Kinds of temporary directories. Only needed for ones that are
# globally-managed.
tempdir_kinds = enum(
    BUILD_ENV="build-env",
    EPHEM_WHEEL_CACHE="ephem-wheel-cache",
    REQ_BUILD="req-build",
)


_tempdir_manager: Optional[ExitStack] = None


@contextmanager
def global_tempdir_manager() -> Generator[None, None, None]:
    global _tempdir_manager
    with ExitStack() as stack:
        old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
        try:
            yield
        finally:
            _tempdir_manager = old_tempdir_manager


class TempDirectoryTypeRegistry:
    """Manages temp directory behavior"""

    def __init__(self) -> None:
        self._should_delete: Dict[str, bool] = {}

    def set_delete(self, kind: str, value: bool) -> None:
        """Indicate whether a TempDirectory of the given kind should be
        auto-deleted.
        """
        self._should_delete[kind] = value

    def get_delete(self, kind: str) -> bool:
        """Get configured auto-delete flag for a given TempDirectory type,
        default True.
        """
        return self._should_delete.get(kind, True)


_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None


@contextmanager
def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
    """Provides a scoped global tempdir registry that can be used to dictate
    whether directories should be deleted.
    """
    global _tempdir_registry
    old_tempdir_registry = _tempdir_registry
    _tempdir_registry = TempDirectoryTypeRegistry()
    try:
        yield _tempdir_registry
    finally:
        _tempdir_registry = old_tempdir_registry


class _Default:
    pass


_default = _Default()


class TempDirectory:
    """Helper class that owns and cleans up a temporary directory.

    This class can be used as a context manager or as an OO representation of a
    temporary directory.

    Attributes:
        path
            Location to the created temporary directory
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    Methods:
        cleanup()
            Deletes the temporary directory

    When used as a context manager, if the delete attribute is True, on
    exiting the context the temporary directory is deleted.
    """

    def __init__(
        self,
        path: Optional[str] = None,
        delete: Union[bool, None, _Default] = _default,
        kind: str = "temp",
        globally_managed: bool = False,
        ignore_cleanup_errors: bool = True,
    ):
        super().__init__()

        if delete is _default:
            if path is not None:
                # If we were given an explicit directory, resolve delete option
                # now.
                delete = False
            else:
                # Otherwise, we wait until cleanup and see what
                # tempdir_registry says.
                delete = None

        # The only time we specify path is in for editables where it
        # is the value of the --src option.
        if path is None:
            path = self._create(kind)

        self._path = path
        self._deleted = False
        self.delete = delete
        self.kind = kind
        self.ignore_cleanup_errors = ignore_cleanup_errors

        if globally_managed:
            assert _tempdir_manager is not None
            _tempdir_manager.enter_context(self)

    @property
    def path(self) -> str:
        assert not self._deleted, f"Attempted to access deleted path: {self._path}"
        return self._path

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.path!r}>"

    def __enter__(self: _T) -> _T:
        return self

    def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
        if self.delete is not None:
            delete = self.delete
        elif _tempdir_registry:
            delete = _tempdir_registry.get_delete(self.kind)
        else:
            delete = True

        if delete:
            self.cleanup()

    def _create(self, kind: str) -> str:
        """Create a temporary directory and store its path in self.path"""
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory. This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
        logger.debug("Created temporary directory: %s", path)
        return path

    def cleanup(self) -> None:
        """Remove the temporary directory created and reset state"""
        self._deleted = True
        if not os.path.exists(self._path):
            return

        errors: List[BaseException] = []

        def onerror(
            func: Callable[..., Any],
            path: Path,
            exc_val: BaseException,
        ) -> None:
            """Log a warning for a `rmtree` error and continue"""
            formatted_exc = "\n".join(
                traceback.format_exception_only(type(exc_val), exc_val)
            )
            formatted_exc = formatted_exc.rstrip()  # remove trailing new line
            if func in (os.unlink, os.remove, os.rmdir):
                logger.debug(
                    "Failed to remove a temporary file '%s' due to %s.\n",
                    path,
                    formatted_exc,
                )
            else:
                logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)
            errors.append(exc_val)

        if self.ignore_cleanup_errors:
            try:
                # first try with @retry; retrying to handle ephemeral errors
                rmtree(self._path, ignore_errors=False)
            except OSError:
                # last pass ignore/log all errors
                rmtree(self._path, onexc=onerror)
            if errors:
                logger.warning(
                    "Failed to remove contents in a temporary directory '%s'.\n"
                    "You can safely remove it manually.",
                    self._path,
                )
        else:
            rmtree(self._path)


class AdjacentTempDirectory(TempDirectory):
    """Helper class that creates a temporary directory adjacent to a real one.

    Attributes:
        original
            The original directory to create a temp directory for.
        path
            After calling create() or entering, contains the full
            path to the temporary directory.
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    """

    # The characters that may be used to name the temp directory
    # We always prepend a ~ and then rotate through these until
    # a usable name is found.
    # pkg_resources raises a different error for .dist-info folder
    # with leading '-' and invalid metadata
    LEADING_CHARS = "-~.=%0123456789"

    def __init__(self, original: str, delete: Optional[bool] = None) -> None:
        self.original = original.rstrip("/\\")
        super().__init__(delete=delete)

    @classmethod
    def _generate_names(cls, name: str) -> Generator[str, None, None]:
        """Generates a series of temporary names.

        The algorithm replaces the leading characters in the name
        with ones that are valid filesystem characters, but are not
        valid package names (for both Python and pip definitions of
        package).
        """
        for i in range(1, len(name)):
            for candidate in itertools.combinations_with_replacement(
                cls.LEADING_CHARS, i - 1
            ):
                new_name = "~" + "".join(candidate) + name[i:]
                if new_name != name:
                    yield new_name

        # If we make it this far, we will have to make a longer name
        for i in range(len(cls.LEADING_CHARS)):
            for candidate in itertools.combinations_with_replacement(
                cls.LEADING_CHARS, i
            ):
                new_name = "~" + "".join(candidate) + name
                if new_name != name:
                    yield new_name

    def _create(self, kind: str) -> str:
        root, name = os.path.split(self.original)
        for candidate in self._generate_names(name):
            path = os.path.join(root, candidate)
            try:
                os.mkdir(path)
            except OSError as ex:
                # Continue if the name exists already
                if ex.errno != errno.EEXIST:
                    raise
            else:
                path = os.path.realpath(path)
                break
        else:
            # Final fallback on the default behavior.
            path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))

        logger.debug("Created temporary directory: %s", path)
        return path
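A usage sketch (illustrative only, not part of the uploaded diff): with no explicit path and no registry override, delete resolves to True on context exit, so the directory is created eagerly and removed afterwards.

    import os.path

    from pip._internal.utils.temp_dir import TempDirectory

    with TempDirectory(kind="demo") as tmp:
        path = tmp.path
        print(os.path.exists(path))  # True: created eagerly in __init__
    print(os.path.exists(path))      # False: cleaned up on exit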
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py
ADDED
@@ -0,0 +1,104 @@
import logging
import os
import re
import site
import sys
from typing import List, Optional

logger = logging.getLogger(__name__)
_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
    r"include-system-site-packages\s*=\s*(?P<value>true|false)"
)


def _running_under_venv() -> bool:
    """Checks if sys.base_prefix and sys.prefix match.

    This handles PEP 405 compliant virtual environments.
    """
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)


def _running_under_legacy_virtualenv() -> bool:
    """Checks if sys.real_prefix is set.

    This handles virtual environments created with pypa's virtualenv.
    """
    # pypa/virtualenv case
    return hasattr(sys, "real_prefix")


def running_under_virtualenv() -> bool:
    """True if we're running inside a virtual environment, False otherwise."""
    return _running_under_venv() or _running_under_legacy_virtualenv()


def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines

    Returns None, if it could not read/access the file.
    """
    pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
    try:
        # Although PEP 405 does not specify, the built-in venv module always
        # writes with UTF-8. (pypa/pip#8717)
        with open(pyvenv_cfg_file, encoding="utf-8") as f:
            return f.read().splitlines()  # avoids trailing newlines
    except OSError:
        return None


def _no_global_under_venv() -> bool:
    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion

    PEP 405 specifies that when system site-packages are not supposed to be
    visible from a virtual environment, `pyvenv.cfg` must contain the following
    line:

        include-system-site-packages = false

    Additionally, log a warning if accessing the file fails.
    """
    cfg_lines = _get_pyvenv_cfg_lines()
    if cfg_lines is None:
        # We're not in a "sane" venv, so assume there is no system
        # site-packages access (since that's PEP 405's default state).
        logger.warning(
            "Could not access 'pyvenv.cfg' despite a virtual environment "
            "being active. Assuming global site-packages is not accessible "
            "in this environment."
        )
        return True

    for line in cfg_lines:
        match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
        if match is not None and match.group("value") == "false":
            return True
    return False


def _no_global_under_legacy_virtualenv() -> bool:
    """Check if "no-global-site-packages.txt" exists beside site.py

    This mirrors logic in pypa/virtualenv for determining whether system
    site-packages are visible in the virtual environment.
    """
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_site_packages_file = os.path.join(
        site_mod_dir,
        "no-global-site-packages.txt",
    )
    return os.path.exists(no_global_site_packages_file)


def virtualenv_no_global() -> bool:
    """Returns a boolean, whether running in venv with no system site-packages."""
    # PEP 405 compliance needs to be checked first since virtualenv >=20 would
    # return True for both checks, but is only able to use the PEP 405 config.
    if _running_under_venv():
        return _no_global_under_venv()

    if _running_under_legacy_virtualenv():
        return _no_global_under_legacy_virtualenv()

    return False
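A usage sketch (illustrative only, not part of the uploaded diff): the two public entry points compose the PEP 405 venv check with the legacy virtualenv check.

    from pip._internal.utils.virtualenv import (
        running_under_virtualenv,
        virtualenv_no_global,
    )

    print(running_under_virtualenv())  # True in a PEP 405 venv or legacy virtualenv
    print(virtualenv_no_global())      # True when system site-packages are hidden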
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/compat.py
ADDED
|
@@ -0,0 +1,1137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
#
|
| 3 |
+
# Copyright (C) 2013-2017 Vinay Sajip.
|
| 4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
| 5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 6 |
+
#
|
| 7 |
+
from __future__ import absolute_import
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import shutil
|
| 12 |
+
import sys
|
| 13 |
+
|
| 14 |
+
try:
|
| 15 |
+
import ssl
|
| 16 |
+
except ImportError: # pragma: no cover
|
| 17 |
+
ssl = None
|
| 18 |
+
|
| 19 |
+
if sys.version_info[0] < 3: # pragma: no cover
|
| 20 |
+
from StringIO import StringIO
|
| 21 |
+
string_types = basestring,
|
| 22 |
+
text_type = unicode
|
| 23 |
+
from types import FileType as file_type
|
| 24 |
+
import __builtin__ as builtins
|
| 25 |
+
import ConfigParser as configparser
|
| 26 |
+
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
|
| 27 |
+
from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
|
| 28 |
+
pathname2url, ContentTooShortError, splittype)
|
| 29 |
+
|
| 30 |
+
def quote(s):
|
| 31 |
+
if isinstance(s, unicode):
|
| 32 |
+
s = s.encode('utf-8')
|
| 33 |
+
return _quote(s)
|
| 34 |
+
|
| 35 |
+
import urllib2
|
| 36 |
+
from urllib2 import (Request, urlopen, URLError, HTTPError,
|
| 37 |
+
HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler,
|
| 38 |
+
HTTPRedirectHandler, build_opener)
|
| 39 |
+
if ssl:
|
| 40 |
+
from urllib2 import HTTPSHandler
|
| 41 |
+
import httplib
|
| 42 |
+
import xmlrpclib
|
| 43 |
+
import Queue as queue
|
| 44 |
+
from HTMLParser import HTMLParser
|
| 45 |
+
import htmlentitydefs
|
| 46 |
+
raw_input = raw_input
|
| 47 |
+
from itertools import ifilter as filter
|
| 48 |
+
from itertools import ifilterfalse as filterfalse
|
| 49 |
+
|
| 50 |
+
# Leaving this around for now, in case it needs resurrecting in some way
|
| 51 |
+
# _userprog = None
|
| 52 |
+
# def splituser(host):
|
| 53 |
+
# """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
|
| 54 |
+
# global _userprog
|
| 55 |
+
# if _userprog is None:
|
| 56 |
+
# import re
|
| 57 |
+
# _userprog = re.compile('^(.*)@(.*)$')
|
| 58 |
+
|
| 59 |
+
# match = _userprog.match(host)
|
| 60 |
+
# if match: return match.group(1, 2)
|
| 61 |
+
# return None, host
|
| 62 |
+
|
| 63 |
+
else: # pragma: no cover
|
| 64 |
+
from io import StringIO
|
| 65 |
+
string_types = str,
|
| 66 |
+
text_type = str
|
| 67 |
+
from io import TextIOWrapper as file_type
|
| 68 |
+
import builtins
|
| 69 |
+
import configparser
|
| 70 |
+
from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote,
|
| 71 |
+
urlsplit, urlunsplit, splittype)
|
| 72 |
+
from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
|
| 73 |
+
pathname2url, HTTPBasicAuthHandler,
|
| 74 |
+
HTTPPasswordMgr, HTTPHandler,
|
| 75 |
+
HTTPRedirectHandler, build_opener)
|
| 76 |
+
if ssl:
|
| 77 |
+
from urllib.request import HTTPSHandler
|
| 78 |
+
from urllib.error import HTTPError, URLError, ContentTooShortError
|
| 79 |
+
import http.client as httplib
|
| 80 |
+
import urllib.request as urllib2
|
| 81 |
+
import xmlrpc.client as xmlrpclib
|
| 82 |
+
import queue
|
| 83 |
+
from html.parser import HTMLParser
|
| 84 |
+
import html.entities as htmlentitydefs
|
| 85 |
+
raw_input = input
|
| 86 |
+
from itertools import filterfalse
|
| 87 |
+
filter = filter
|
| 88 |
+
|
| 89 |
+
try:
|
| 90 |
+
from ssl import match_hostname, CertificateError
|
| 91 |
+
except ImportError: # pragma: no cover
|
| 92 |
+
|
| 93 |
+
class CertificateError(ValueError):
|
| 94 |
+
pass
|
| 95 |
+
|
| 96 |
+
def _dnsname_match(dn, hostname, max_wildcards=1):
|
| 97 |
+
"""Matching according to RFC 6125, section 6.4.3
|
| 98 |
+
|
| 99 |
+
http://tools.ietf.org/html/rfc6125#section-6.4.3
|
| 100 |
+
"""
|
| 101 |
+
pats = []
|
| 102 |
+
if not dn:
|
| 103 |
+
return False
|
| 104 |
+
|
| 105 |
+
parts = dn.split('.')
|
| 106 |
+
leftmost, remainder = parts[0], parts[1:]
|
| 107 |
+
|
| 108 |
+
wildcards = leftmost.count('*')
|
| 109 |
+
if wildcards > max_wildcards:
|
| 110 |
+
# Issue #17980: avoid denials of service by refusing more
|
| 111 |
+
# than one wildcard per fragment. A survey of established
|
| 112 |
+
# policy among SSL implementations showed it to be a
|
| 113 |
+
# reasonable choice.
|
| 114 |
+
raise CertificateError(
|
| 115 |
+
"too many wildcards in certificate DNS name: " + repr(dn))
|
| 116 |
+
|
| 117 |
+
# speed up common case w/o wildcards
|
| 118 |
+
if not wildcards:
|
| 119 |
+
return dn.lower() == hostname.lower()
|
| 120 |
+
|
| 121 |
+
# RFC 6125, section 6.4.3, subitem 1.
|
| 122 |
+
# The client SHOULD NOT attempt to match a presented identifier in which
|
| 123 |
+
# the wildcard character comprises a label other than the left-most label.
|
| 124 |
+
if leftmost == '*':
|
| 125 |
+
# When '*' is a fragment by itself, it matches a non-empty dotless
|
| 126 |
+
# fragment.
|
| 127 |
+
pats.append('[^.]+')
|
| 128 |
+
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
|
| 129 |
+
# RFC 6125, section 6.4.3, subitem 3.
|
| 130 |
+
# The client SHOULD NOT attempt to match a presented identifier
|
| 131 |
+
# where the wildcard character is embedded within an A-label or
|
| 132 |
+
# U-label of an internationalized domain name.
|
| 133 |
+
pats.append(re.escape(leftmost))
|
| 134 |
+
else:
|
| 135 |
+
# Otherwise, '*' matches any dotless string, e.g. www*
|
| 136 |
+
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
|
| 137 |
+
|
| 138 |
+
# add the remaining fragments, ignore any wildcards
|
| 139 |
+
for frag in remainder:
|
| 140 |
+
pats.append(re.escape(frag))
|
| 141 |
+
|
| 142 |
+
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
|
| 143 |
+
return pat.match(hostname)
|
| 144 |
+
|
| 145 |
+
def match_hostname(cert, hostname):
|
| 146 |
+
"""Verify that *cert* (in decoded format as returned by
|
| 147 |
+
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
|
| 148 |
+
rules are followed, but IP addresses are not accepted for *hostname*.
|
| 149 |
+
|
| 150 |
+
CertificateError is raised on failure. On success, the function
|
| 151 |
+
returns nothing.
|
| 152 |
+
"""
|
| 153 |
+
if not cert:
|
| 154 |
+
raise ValueError("empty or no certificate, match_hostname needs a "
|
| 155 |
+
"SSL socket or SSL context with either "
|
| 156 |
+
"CERT_OPTIONAL or CERT_REQUIRED")
|
| 157 |
+
dnsnames = []
|
| 158 |
+
san = cert.get('subjectAltName', ())
|
| 159 |
+
for key, value in san:
|
| 160 |
+
if key == 'DNS':
|
| 161 |
+
if _dnsname_match(value, hostname):
|
| 162 |
+
return
|
| 163 |
+
dnsnames.append(value)
|
| 164 |
+
if not dnsnames:
|
| 165 |
+
# The subject is only checked when there is no dNSName entry
|
| 166 |
+
# in subjectAltName
|
| 167 |
+
for sub in cert.get('subject', ()):
|
| 168 |
+
for key, value in sub:
|
| 169 |
+
# XXX according to RFC 2818, the most specific Common Name
|
| 170 |
+
# must be used.
|
| 171 |
+
if key == 'commonName':
|
| 172 |
+
if _dnsname_match(value, hostname):
|
| 173 |
+
return
|
| 174 |
+
dnsnames.append(value)
|
| 175 |
+
if len(dnsnames) > 1:
|
| 176 |
+
raise CertificateError("hostname %r "
|
| 177 |
+
"doesn't match either of %s" %
|
| 178 |
+
(hostname, ', '.join(map(repr, dnsnames))))
|
| 179 |
+
elif len(dnsnames) == 1:
|
| 180 |
+
raise CertificateError("hostname %r "
|
| 181 |
+
"doesn't match %r" %
|
| 182 |
+
(hostname, dnsnames[0]))
|
| 183 |
+
else:
|
| 184 |
+
raise CertificateError("no appropriate commonName or "
|
| 185 |
+
"subjectAltName fields were found")
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
try:
|
| 189 |
+
from types import SimpleNamespace as Container
|
| 190 |
+
except ImportError: # pragma: no cover
|
| 191 |
+
|
| 192 |
+
class Container(object):
|
| 193 |
+
"""
|
| 194 |
+
A generic container for when multiple values need to be returned
|
| 195 |
+
"""
|
| 196 |
+
|
| 197 |
+
def __init__(self, **kwargs):
|
| 198 |
+
self.__dict__.update(kwargs)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
try:
|
| 202 |
+
from shutil import which
|
| 203 |
+
except ImportError: # pragma: no cover
|
| 204 |
+
# Implementation from Python 3.3
|
| 205 |
+
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
|
| 206 |
+
"""Given a command, mode, and a PATH string, return the path which
|
| 207 |
+
conforms to the given mode on the PATH, or None if there is no such
|
| 208 |
+
file.
|
| 209 |
+
|
| 210 |
+
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
|
| 211 |
+
of os.environ.get("PATH"), or can be overridden with a custom search
|
| 212 |
+
path.
|
| 213 |
+
|
| 214 |
+
"""
|
| 215 |
+
|
| 216 |
+
# Check that a given file can be accessed with the correct mode.
|
| 217 |
+
# Additionally check that `file` is not a directory, as on Windows
|
| 218 |
+
# directories pass the os.access check.
|
| 219 |
+
def _access_check(fn, mode):
|
| 220 |
+
return (os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn))
|
| 221 |
+
|
| 222 |
+
# If we're given a path with a directory part, look it up directly rather
|
| 223 |
+
# than referring to PATH directories. This includes checking relative to the
|
| 224 |
+
# current directory, e.g. ./script
|
| 225 |
+
if os.path.dirname(cmd):
|
| 226 |
+
if _access_check(cmd, mode):
|
| 227 |
+
return cmd
|
| 228 |
+
return None
|
| 229 |
+
|
| 230 |
+
if path is None:
|
| 231 |
+
path = os.environ.get("PATH", os.defpath)
|
| 232 |
+
if not path:
|
| 233 |
+
return None
|
| 234 |
+
path = path.split(os.pathsep)
|
| 235 |
+
|
| 236 |
+
if sys.platform == "win32":
|
| 237 |
+
# The current directory takes precedence on Windows.
|
| 238 |
+
if os.curdir not in path:
|
| 239 |
+
path.insert(0, os.curdir)
|
| 240 |
+
|
| 241 |
+
# PATHEXT is necessary to check on Windows.
|
| 242 |
+
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
|
| 243 |
+
# See if the given file matches any of the expected path extensions.
|
| 244 |
+
# This will allow us to short circuit when given "python.exe".
|
| 245 |
+
# If it does match, only test that one, otherwise we have to try
|
| 246 |
+
# others.
|
| 247 |
+
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
|
| 248 |
+
files = [cmd]
|
| 249 |
+
else:
|
| 250 |
+
files = [cmd + ext for ext in pathext]
|
| 251 |
+
else:
|
| 252 |
+
# On other platforms you don't have things like PATHEXT to tell you
|
| 253 |
+
# what file suffixes are executable, so just pass on cmd as-is.
|
| 254 |
+
files = [cmd]
|
| 255 |
+
|
| 256 |
+
seen = set()
|
| 257 |
+
for dir in path:
|
| 258 |
+
normdir = os.path.normcase(dir)
|
| 259 |
+
if normdir not in seen:
|
| 260 |
+
seen.add(normdir)
|
| 261 |
+
for thefile in files:
|
| 262 |
+
name = os.path.join(dir, thefile)
|
| 263 |
+
if _access_check(name, mode):
|
| 264 |
+
return name
|
| 265 |
+
return None
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
# ZipFile is a context manager in 2.7, but not in 2.6
|
| 269 |
+
|
| 270 |
+
from zipfile import ZipFile as BaseZipFile
|
| 271 |
+
|
| 272 |
+
if hasattr(BaseZipFile, '__enter__'): # pragma: no cover
|
| 273 |
+
ZipFile = BaseZipFile
|
| 274 |
+
else: # pragma: no cover
|
| 275 |
+
from zipfile import ZipExtFile as BaseZipExtFile
|
| 276 |
+
|
| 277 |
+
class ZipExtFile(BaseZipExtFile):
|
| 278 |
+
|
| 279 |
+
def __init__(self, base):
|
| 280 |
+
self.__dict__.update(base.__dict__)
|
| 281 |
+
|
| 282 |
+
def __enter__(self):
|
| 283 |
+
return self
|
| 284 |
+
|
| 285 |
+
def __exit__(self, *exc_info):
|
| 286 |
+
self.close()
|
| 287 |
+
# return None, so if an exception occurred, it will propagate
|
| 288 |
+
|
| 289 |
+
class ZipFile(BaseZipFile):
|
| 290 |
+
|
| 291 |
+
def __enter__(self):
|
| 292 |
+
return self
|
| 293 |
+
|
| 294 |
+
def __exit__(self, *exc_info):
|
| 295 |
+
self.close()
|
| 296 |
+
# return None, so if an exception occurred, it will propagate
|
| 297 |
+
|
| 298 |
+
def open(self, *args, **kwargs):
|
| 299 |
+
base = BaseZipFile.open(self, *args, **kwargs)
|
| 300 |
+
return ZipExtFile(base)
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
try:
|
| 304 |
+
from platform import python_implementation
|
| 305 |
+
except ImportError: # pragma: no cover
|
| 306 |
+
|
| 307 |
+
def python_implementation():
|
| 308 |
+
"""Return a string identifying the Python implementation."""
|
| 309 |
+
if 'PyPy' in sys.version:
|
| 310 |
+
return 'PyPy'
|
| 311 |
+
if os.name == 'java':
|
| 312 |
+
return 'Jython'
|
| 313 |
+
if sys.version.startswith('IronPython'):
|
| 314 |
+
return 'IronPython'
|
| 315 |
+
return 'CPython'
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
import sysconfig
|
| 319 |
+
|
| 320 |
+
try:
|
| 321 |
+
callable = callable
|
| 322 |
+
except NameError: # pragma: no cover
|
| 323 |
+
from collections.abc import Callable
|
| 324 |
+
|
| 325 |
+
def callable(obj):
|
| 326 |
+
return isinstance(obj, Callable)
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
try:
|
| 330 |
+
fsencode = os.fsencode
|
| 331 |
+
fsdecode = os.fsdecode
|
| 332 |
+
except AttributeError: # pragma: no cover
|
| 333 |
+
# Issue #99: on some systems (e.g. containerised),
|
| 334 |
+
# sys.getfilesystemencoding() returns None, and we need a real value,
|
| 335 |
+
# so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
|
| 336 |
+
# sys.getfilesystemencoding(): the return value is "the user’s preference
|
| 337 |
+
# according to the result of nl_langinfo(CODESET), or None if the
|
| 338 |
+
# nl_langinfo(CODESET) failed."
|
| 339 |
+
_fsencoding = sys.getfilesystemencoding() or 'utf-8'
|
| 340 |
+
if _fsencoding == 'mbcs':
|
| 341 |
+
_fserrors = 'strict'
|
| 342 |
+
else:
|
| 343 |
+
_fserrors = 'surrogateescape'
|
| 344 |
+
|
| 345 |
+
def fsencode(filename):
|
| 346 |
+
if isinstance(filename, bytes):
|
| 347 |
+
return filename
|
| 348 |
+
elif isinstance(filename, text_type):
|
| 349 |
+
return filename.encode(_fsencoding, _fserrors)
|
| 350 |
+
else:
|
| 351 |
+
raise TypeError("expect bytes or str, not %s" %
|
| 352 |
+
type(filename).__name__)
|
| 353 |
+
|
| 354 |
+
def fsdecode(filename):
|
| 355 |
+
if isinstance(filename, text_type):
|
| 356 |
+
return filename
|
| 357 |
+
elif isinstance(filename, bytes):
|
| 358 |
+
return filename.decode(_fsencoding, _fserrors)
|
| 359 |
+
else:
|
| 360 |
+
raise TypeError("expect bytes or str, not %s" %
|
| 361 |
+
type(filename).__name__)
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
try:
    from tokenize import detect_encoding
except ImportError: # pragma: no cover
    from codecs import BOM_UTF8, lookup

    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")

    def _get_normal_name(orig_enc):
        """Imitates get_normal_name in tokenizer.c."""
        # Only care about the first 12 characters.
        enc = orig_enc[:12].lower().replace("_", "-")
        if enc == "utf-8" or enc.startswith("utf-8-"):
            return "utf-8"
        if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
           enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
            return "iso-8859-1"
        return orig_enc

    def detect_encoding(readline):
        """
        The detect_encoding() function is used to detect the encoding that should
        be used to decode a Python source file. It requires one argument, readline,
        in the same way as the tokenize() generator.

        It will call readline a maximum of twice, and return the encoding used
        (as a string) and a list of any lines (left as bytes) it has read in.

        It detects the encoding from the presence of a utf-8 bom or an encoding
        cookie as specified in pep-0263. If both a bom and a cookie are present,
        but disagree, a SyntaxError will be raised. If the encoding cookie is an
        invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
        'utf-8-sig' is returned.

        If no encoding is specified, then the default of 'utf-8' will be returned.
        """
        try:
            filename = readline.__self__.name
        except AttributeError:
            filename = None
        bom_found = False
        encoding = None
        default = 'utf-8'

        def read_or_stop():
            try:
                return readline()
            except StopIteration:
                return b''

        def find_cookie(line):
            try:
                # Decode as UTF-8. Either the line is an encoding declaration,
                # in which case it should be pure ASCII, or it must be UTF-8
                # per default encoding.
                line_string = line.decode('utf-8')
            except UnicodeDecodeError:
                msg = "invalid or missing encoding declaration"
                if filename is not None:
                    msg = '{} for {!r}'.format(msg, filename)
                raise SyntaxError(msg)

            matches = cookie_re.findall(line_string)
            if not matches:
                return None
            encoding = _get_normal_name(matches[0])
            try:
                codec = lookup(encoding)
            except LookupError:
                # This behaviour mimics the Python interpreter
                if filename is None:
                    msg = "unknown encoding: " + encoding
                else:
                    msg = "unknown encoding for {!r}: {}".format(
                        filename, encoding)
                raise SyntaxError(msg)

            if bom_found:
                if codec.name != 'utf-8':
                    # This behaviour mimics the Python interpreter
                    if filename is None:
                        msg = 'encoding problem: utf-8'
                    else:
                        msg = 'encoding problem for {!r}: utf-8'.format(
                            filename)
                    raise SyntaxError(msg)
                encoding += '-sig'
            return encoding

        first = read_or_stop()
        if first.startswith(BOM_UTF8):
            bom_found = True
            first = first[3:]
            default = 'utf-8-sig'
        if not first:
            return default, []

        encoding = find_cookie(first)
        if encoding:
            return encoding, [first]

        second = read_or_stop()
        if not second:
            return default, [first]

        encoding = find_cookie(second)
        if encoding:
            return encoding, [first, second]

        return default, [first, second]

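# --- Illustration (not part of the vendored file) ---
# detect_encoding() takes a readline callable over bytes, exactly like the
# tokenize() generator. A minimal sketch (the path is hypothetical):
#
#     >>> with open('module.py', 'rb') as f:
#     ...     encoding, consumed_lines = detect_encoding(f.readline)
#     >>> encoding    # 'utf-8' unless a BOM or PEP 263 cookie says otherwise
#     'utf-8'
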
# For converting & <-> &amp; etc.
try:
    from html import escape
except ImportError:
    from cgi import escape
if sys.version_info[:2] < (3, 4):
    unescape = HTMLParser().unescape
else:
    from html import unescape

try:
    from collections import ChainMap
except ImportError: # pragma: no cover
    from collections import MutableMapping

    try:
        from reprlib import recursive_repr as _recursive_repr
    except ImportError:

        def _recursive_repr(fillvalue='...'):
            '''
            Decorator to make a repr function return fillvalue for a recursive
            call
            '''

            def decorating_function(user_function):
                repr_running = set()

                def wrapper(self):
                    key = id(self), get_ident()
                    if key in repr_running:
                        return fillvalue
                    repr_running.add(key)
                    try:
                        result = user_function(self)
                    finally:
                        repr_running.discard(key)
                    return result

                # Can't use functools.wraps() here because of bootstrap issues
                wrapper.__module__ = getattr(user_function, '__module__')
                wrapper.__doc__ = getattr(user_function, '__doc__')
                wrapper.__name__ = getattr(user_function, '__name__')
                wrapper.__annotations__ = getattr(user_function,
                                                  '__annotations__', {})
                return wrapper

            return decorating_function

    class ChainMap(MutableMapping):
        '''
        A ChainMap groups multiple dicts (or other mappings) together
        to create a single, updateable view.

        The underlying mappings are stored in a list. That list is public and
        can be accessed or updated using the *maps* attribute. There is no
        other state.

        Lookups search the underlying mappings successively until a key is found.
        In contrast, writes, updates, and deletions only operate on the first
        mapping.
        '''

        def __init__(self, *maps):
            '''Initialize a ChainMap by setting *maps* to the given mappings.
            If no mappings are provided, a single empty dictionary is used.

            '''
            self.maps = list(maps) or [{}]  # always at least one map

        def __missing__(self, key):
            raise KeyError(key)

        def __getitem__(self, key):
            for mapping in self.maps:
                try:
                    return mapping[key]  # can't use 'key in mapping' with defaultdict
                except KeyError:
                    pass
            return self.__missing__(key)  # support subclasses that define __missing__

        def get(self, key, default=None):
            return self[key] if key in self else default

        def __len__(self):
            return len(set().union(*self.maps))  # reuses stored hash values if possible

        def __iter__(self):
            return iter(set().union(*self.maps))

        def __contains__(self, key):
            return any(key in m for m in self.maps)

        def __bool__(self):
            return any(self.maps)

        @_recursive_repr()
        def __repr__(self):
            return '{0.__class__.__name__}({1})'.format(
                self, ', '.join(map(repr, self.maps)))

        @classmethod
        def fromkeys(cls, iterable, *args):
            'Create a ChainMap with a single dict created from the iterable.'
            return cls(dict.fromkeys(iterable, *args))

        def copy(self):
            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
            return self.__class__(self.maps[0].copy(), *self.maps[1:])

        __copy__ = copy

        def new_child(self):  # like Django's Context.push()
            'New ChainMap with a new dict followed by all previous maps.'
            return self.__class__({}, *self.maps)

        @property
        def parents(self):  # like Django's Context.pop()
            'New ChainMap from maps[1:].'
            return self.__class__(*self.maps[1:])

        def __setitem__(self, key, value):
            self.maps[0][key] = value

        def __delitem__(self, key):
            try:
                del self.maps[0][key]
            except KeyError:
                raise KeyError(
                    'Key not found in the first mapping: {!r}'.format(key))

        def popitem(self):
            'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
            try:
                return self.maps[0].popitem()
            except KeyError:
                raise KeyError('No keys found in the first mapping.')

        def pop(self, key, *args):
            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
            try:
                return self.maps[0].pop(key, *args)
            except KeyError:
                raise KeyError(
                    'Key not found in the first mapping: {!r}'.format(key))

        def clear(self):
            'Clear maps[0], leaving maps[1:] intact.'
            self.maps[0].clear()

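# --- Illustration (not part of the vendored file) ---
# A minimal sketch of ChainMap semantics, whichever implementation the
# try/except above selected: lookups search all maps in order, while
# writes only ever touch maps[0].
#
#     >>> defaults = {'colour': 'red', 'user': 'guest'}
#     >>> overrides = {'user': 'admin'}
#     >>> cm = ChainMap(overrides, defaults)
#     >>> cm['user']              # found in the first mapping
#     'admin'
#     >>> cm['colour']            # falls through to the second mapping
#     'red'
#     >>> cm['colour'] = 'blue'   # writes go to maps[0] only
#     >>> defaults['colour']
#     'red'
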
try:
    from importlib.util import cache_from_source # Python >= 3.4
except ImportError: # pragma: no cover

    def cache_from_source(path, debug_override=None):
        assert path.endswith('.py')
        if debug_override is None:
            debug_override = __debug__
        if debug_override:
            suffix = 'c'
        else:
            suffix = 'o'
        return path + suffix

try:
    from collections import OrderedDict
except ImportError: # pragma: no cover
    # {{{ http://code.activestate.com/recipes/576693/ (r9)
    # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
    # Passes Python2.7's test suite and incorporates all the latest updates.
    try:
        from thread import get_ident as _get_ident
    except ImportError:
        from dummy_thread import get_ident as _get_ident

    try:
        from _abcoll import KeysView, ValuesView, ItemsView
    except ImportError:
        pass

    class OrderedDict(dict):
        'Dictionary that remembers insertion order'

        # An inherited dict maps keys to values.
        # The inherited dict provides __getitem__, __len__, __contains__, and get.
        # The remaining methods are order-aware.
        # Big-O running times for all methods are the same as for regular dictionaries.

        # The internal self.__map dictionary maps keys to links in a doubly linked list.
        # The circular doubly linked list starts and ends with a sentinel element.
        # The sentinel element never gets deleted (this simplifies the algorithm).
        # Each link is stored as a list of length three: [PREV, NEXT, KEY].

        def __init__(self, *args, **kwds):
            '''Initialize an ordered dictionary. Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            '''
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' %
                                len(args))
            try:
                self.__root
            except AttributeError:
                self.__root = root = []  # sentinel node
                root[:] = [root, root, None]
                self.__map = {}
            self.__update(*args, **kwds)

        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
            'od.__setitem__(i, y) <==> od[i]=y'
            # Setting a new item creates a new link which goes at the end of the linked
            # list, and the inherited dictionary is updated with the new key/value pair.
            if key not in self:
                root = self.__root
                last = root[0]
                last[1] = root[0] = self.__map[key] = [last, root, key]
            dict_setitem(self, key, value)

        def __delitem__(self, key, dict_delitem=dict.__delitem__):
            'od.__delitem__(y) <==> del od[y]'
            # Deleting an existing item uses self.__map to find the link which is
            # then removed by updating the links in the predecessor and successor nodes.
            dict_delitem(self, key)
            link_prev, link_next, key = self.__map.pop(key)
            link_prev[1] = link_next
            link_next[0] = link_prev

        def __iter__(self):
            'od.__iter__() <==> iter(od)'
            root = self.__root
            curr = root[1]
            while curr is not root:
                yield curr[2]
                curr = curr[1]

        def __reversed__(self):
            'od.__reversed__() <==> reversed(od)'
            root = self.__root
            curr = root[0]
            while curr is not root:
                yield curr[2]
                curr = curr[0]

        def clear(self):
            'od.clear() -> None. Remove all items from od.'
            try:
                for node in self.__map.itervalues():
                    del node[:]
                root = self.__root
                root[:] = [root, root, None]
                self.__map.clear()
            except AttributeError:
                pass
            dict.clear(self)

        def popitem(self, last=True):
            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if false.

            '''
            if not self:
                raise KeyError('dictionary is empty')
            root = self.__root
            if last:
                link = root[0]
                link_prev = link[0]
                link_prev[1] = root
                root[0] = link_prev
            else:
                link = root[1]
                link_next = link[1]
                root[1] = link_next
                link_next[0] = root
            key = link[2]
            del self.__map[key]
            value = dict.pop(self, key)
            return key, value

        # -- the following methods do not depend on the internal structure --

        def keys(self):
            'od.keys() -> list of keys in od'
            return list(self)

        def values(self):
            'od.values() -> list of values in od'
            return [self[key] for key in self]

        def items(self):
            'od.items() -> list of (key, value) pairs in od'
            return [(key, self[key]) for key in self]

        def iterkeys(self):
            'od.iterkeys() -> an iterator over the keys in od'
            return iter(self)

        def itervalues(self):
            'od.itervalues -> an iterator over the values in od'
            for k in self:
                yield self[k]

        def iteritems(self):
            'od.iteritems -> an iterator over the (key, value) items in od'
            for k in self:
                yield (k, self[k])

        def update(*args, **kwds):
            '''od.update(E, **F) -> None. Update od from dict/iterable E and F.

            If E is a dict instance, does: for k in E: od[k] = E[k]
            If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
            Or if E is an iterable of items, does: for k, v in E: od[k] = v
            In either case, this is followed by: for k, v in F.items(): od[k] = v

            '''
            if len(args) > 2:
                raise TypeError('update() takes at most 2 positional '
                                'arguments (%d given)' % (len(args), ))
            elif not args:
                raise TypeError('update() takes at least 1 argument (0 given)')
            self = args[0]
            # Make progressively weaker assumptions about "other"
            other = ()
            if len(args) == 2:
                other = args[1]
            if isinstance(other, dict):
                for key in other:
                    self[key] = other[key]
            elif hasattr(other, 'keys'):
                for key in other.keys():
                    self[key] = other[key]
            else:
                for key, value in other:
                    self[key] = value
            for key, value in kwds.items():
                self[key] = value

        __update = update  # let subclasses override update without breaking __init__

        __marker = object()

        def pop(self, key, default=__marker):
            '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
            If key is not found, d is returned if given, otherwise KeyError is raised.

            '''
            if key in self:
                result = self[key]
                del self[key]
                return result
            if default is self.__marker:
                raise KeyError(key)
            return default

        def setdefault(self, key, default=None):
            'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
            if key in self:
                return self[key]
            self[key] = default
            return default

        def __repr__(self, _repr_running=None):
            'od.__repr__() <==> repr(od)'
            if not _repr_running:
                _repr_running = {}
            call_key = id(self), _get_ident()
            if call_key in _repr_running:
                return '...'
            _repr_running[call_key] = 1
            try:
                if not self:
                    return '%s()' % (self.__class__.__name__, )
                return '%s(%r)' % (self.__class__.__name__, self.items())
            finally:
                del _repr_running[call_key]

        def __reduce__(self):
            'Return state information for pickling'
            items = [[k, self[k]] for k in self]
            inst_dict = vars(self).copy()
            for k in vars(OrderedDict()):
                inst_dict.pop(k, None)
            if inst_dict:
                return (self.__class__, (items, ), inst_dict)
            return self.__class__, (items, )

        def copy(self):
            'od.copy() -> a shallow copy of od'
            return self.__class__(self)

        @classmethod
        def fromkeys(cls, iterable, value=None):
            '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
            and values equal to v (which defaults to None).

            '''
            d = cls()
            for key in iterable:
                d[key] = value
            return d

        def __eq__(self, other):
            '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
            while comparison to a regular mapping is order-insensitive.

            '''
            if isinstance(other, OrderedDict):
                return len(self) == len(other) and self.items() == other.items()
            return dict.__eq__(self, other)

        def __ne__(self, other):
            return not self == other

        # -- the following methods are only used in Python 2.7 --

        def viewkeys(self):
            "od.viewkeys() -> a set-like object providing a view on od's keys"
            return KeysView(self)

        def viewvalues(self):
            "od.viewvalues() -> an object providing a view on od's values"
            return ValuesView(self)

        def viewitems(self):
            "od.viewitems() -> a set-like object providing a view on od's items"
            return ItemsView(self)

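# --- Illustration (not part of the vendored file) ---
# The backport above matches the collections.OrderedDict API: insertion
# order is preserved and popitem() can pop from either end.
#
#     >>> od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
#     >>> list(od)
#     ['a', 'b', 'c']
#     >>> od.popitem()            # LIFO by default
#     ('c', 3)
#     >>> od.popitem(last=False)  # FIFO when last=False
#     ('a', 1)
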
try:
    from logging.config import BaseConfigurator, valid_ident
except ImportError: # pragma: no cover
    IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)

    def valid_ident(s):
        m = IDENTIFIER.match(s)
        if not m:
            raise ValueError('Not a valid Python identifier: %r' % s)
        return True

    # The ConvertingXXX classes are wrappers around standard Python containers,
    # and they serve to convert any suitable values in the container. The
    # conversion converts base dicts, lists and tuples to their wrapped
    # equivalents, whereas strings which match a conversion format are converted
    # appropriately.
    #
    # Each wrapper should have a configurator attribute holding the actual
    # configurator to use for conversion.

    class ConvertingDict(dict):
        """A converting dictionary wrapper."""

        def __getitem__(self, key):
            value = dict.__getitem__(self, key)
            result = self.configurator.convert(value)
            # If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def get(self, key, default=None):
            value = dict.get(self, key, default)
            result = self.configurator.convert(value)
            # If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def pop(self, key, default=None):
            value = dict.pop(self, key, default)
            result = self.configurator.convert(value)
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

    class ConvertingList(list):
        """A converting list wrapper."""

        def __getitem__(self, key):
            value = list.__getitem__(self, key)
            result = self.configurator.convert(value)
            # If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def pop(self, idx=-1):
            value = list.pop(self, idx)
            result = self.configurator.convert(value)
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
            return result

    class ConvertingTuple(tuple):
        """A converting tuple wrapper."""

        def __getitem__(self, key):
            value = tuple.__getitem__(self, key)
            result = self.configurator.convert(value)
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

    class BaseConfigurator(object):
        """
        The configurator base class which defines some useful defaults.
        """

        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
        DIGIT_PATTERN = re.compile(r'^\d+$')

        value_converters = {
            'ext': 'ext_convert',
            'cfg': 'cfg_convert',
        }

        # We might want to use a different one, e.g. importlib
        importer = staticmethod(__import__)

        def __init__(self, config):
            self.config = ConvertingDict(config)
            self.config.configurator = self

        def resolve(self, s):
            """
            Resolve strings to objects using standard import and attribute
            syntax.
            """
            name = s.split('.')
            used = name.pop(0)
            try:
                found = self.importer(used)
                for frag in name:
                    used += '.' + frag
                    try:
                        found = getattr(found, frag)
                    except AttributeError:
                        self.importer(used)
                        found = getattr(found, frag)
                return found
            except ImportError:
                e, tb = sys.exc_info()[1:]
                v = ValueError('Cannot resolve %r: %s' % (s, e))
                v.__cause__, v.__traceback__ = e, tb
                raise v

        def ext_convert(self, value):
            """Default converter for the ext:// protocol."""
            return self.resolve(value)

        def cfg_convert(self, value):
            """Default converter for the cfg:// protocol."""
            rest = value
            m = self.WORD_PATTERN.match(rest)
            if m is None:
                raise ValueError("Unable to convert %r" % value)
            else:
                rest = rest[m.end():]
                d = self.config[m.groups()[0]]
                while rest:
                    m = self.DOT_PATTERN.match(rest)
                    if m:
                        d = d[m.groups()[0]]
                    else:
                        m = self.INDEX_PATTERN.match(rest)
                        if m:
                            idx = m.groups()[0]
                            if not self.DIGIT_PATTERN.match(idx):
                                d = d[idx]
                            else:
                                try:
                                    n = int(idx)  # try as number first (most likely)
                                    d = d[n]
                                except TypeError:
                                    d = d[idx]
                    if m:
                        rest = rest[m.end():]
                    else:
                        raise ValueError('Unable to convert '
                                         '%r at %r' % (value, rest))
            # rest should be empty
            return d

        def convert(self, value):
            """
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            """
            if not isinstance(value, ConvertingDict) and isinstance(value, dict):
                value = ConvertingDict(value)
                value.configurator = self
            elif not isinstance(value, ConvertingList) and isinstance(value, list):
                value = ConvertingList(value)
                value.configurator = self
            elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple):
                value = ConvertingTuple(value)
                value.configurator = self
            elif isinstance(value, string_types):
                m = self.CONVERT_PATTERN.match(value)
                if m:
                    d = m.groupdict()
                    prefix = d['prefix']
                    converter = self.value_converters.get(prefix, None)
                    if converter:
                        suffix = d['suffix']
                        converter = getattr(self, converter)
                        value = converter(suffix)
            return value

        def configure_custom(self, config):
            """Configure an object with a user-supplied factory."""
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            props = config.pop('.', None)
            # Check for valid identifiers
            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
            result = c(**kwargs)
            if props:
                for name, value in props.items():
                    setattr(result, name, value)
            return result

        def as_tuple(self, value):
            """Utility function which converts lists to tuples."""
            if isinstance(value, list):
                value = tuple(value)
            return value
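# --- Illustration (not part of the vendored file) ---
# A minimal sketch of the two conversion protocols BaseConfigurator.convert()
# understands: ext:// resolves a dotted import path, while cfg:// looks a
# value up within the configuration itself. The config dict is hypothetical.
#
#     >>> c = BaseConfigurator({'handlers': {'file': {'level': 'DEBUG'}}})
#     >>> c.convert('cfg://handlers.file.level')
#     'DEBUG'
#     >>> c.convert('ext://os.path.join') is os.path.join
#     True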
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/index.py
ADDED
@@ -0,0 +1,508 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2023 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import logging
import os
import shutil
import subprocess
import tempfile
try:
    from threading import Thread
except ImportError: # pragma: no cover
    from dummy_threading import Thread

from . import DistlibException
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
                     urlparse, build_opener, string_types)
from .util import zip_dir, ServerProxy

logger = logging.getLogger(__name__)

DEFAULT_INDEX = 'https://pypi.org/pypi'
DEFAULT_REALM = 'pypi'


class PackageIndex(object):
    """
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    """

    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'

    def __init__(self, url=None):
        """
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        """
        self.url = url or DEFAULT_INDEX
        self.read_configuration()
        scheme, netloc, path, params, query, frag = urlparse(self.url)
        if params or query or frag or scheme not in ('http', 'https'):
            raise DistlibException('invalid repository: %s' % self.url)
        self.password_handler = None
        self.ssl_verifier = None
        self.gpg = None
        self.gpg_home = None
        with open(os.devnull, 'w') as sink:
            # Use gpg by default rather than gpg2, as gpg2 insists on
            # prompting for passwords
            for s in ('gpg', 'gpg2'):
                try:
                    rc = subprocess.check_call([s, '--version'], stdout=sink,
                                               stderr=sink)
                    if rc == 0:
                        self.gpg = s
                        break
                except OSError:
                    pass

    def _get_pypirc_command(self):
        """
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        """
        from .util import _get_pypirc_command as cmd
        return cmd()

    def read_configuration(self):
        """
        Read the PyPI access configuration as supported by distutils. This populates
        ``username``, ``password``, ``realm`` and ``url`` attributes from the
        configuration.
        """
        from .util import _load_pypirc
        cfg = _load_pypirc(self)
        self.username = cfg.get('username')
        self.password = cfg.get('password')
        self.realm = cfg.get('realm', 'pypi')
        self.url = cfg.get('repository', self.url)

    def save_configuration(self):
        """
        Save the PyPI access configuration. You must have set ``username`` and
        ``password`` attributes before calling this method.
        """
        self.check_credentials()
        from .util import _store_pypirc
        _store_pypirc(self)

    def check_credentials(self):
        """
        Check that ``username`` and ``password`` have been set, and raise an
        exception if not.
        """
        if self.username is None or self.password is None:
            raise DistlibException('username and password must be set')
        pm = HTTPPasswordMgr()
        _, netloc, _, _, _, _ = urlparse(self.url)
        pm.add_password(self.realm, netloc, self.username, self.password)
        self.password_handler = HTTPBasicAuthHandler(pm)

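    # --- Illustration (not part of the vendored file) ---
    # A minimal sketch of setting up an index client; the credentials are
    # placeholders. __init__ calls read_configuration(), so any ~/.pypirc
    # settings are picked up first and can be overridden afterwards.
    #
    #     >>> index = PackageIndex()              # defaults to DEFAULT_INDEX
    #     >>> index.username = 'example-user'     # hypothetical credentials
    #     >>> index.password = 'example-pass'
    #     >>> index.check_credentials()           # raises DistlibException if unset
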
    def register(self, metadata): # pragma: no cover
        """
        Register a distribution on PyPI, using the provided metadata.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the distribution to be
                         registered.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        metadata.validate()
        d = metadata.todict()
        d[':action'] = 'verify'
        request = self.encode_request(d.items(), [])
        self.send_request(request)
        d[':action'] = 'submit'
        request = self.encode_request(d.items(), [])
        return self.send_request(request)

    def _reader(self, name, stream, outbuf):
        """
        Thread runner for reading lines of output from a subprocess into a buffer.

        :param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically be a pipe
                       connected to the output stream of a subprocess.
        :param outbuf: The list to append the read lines to.
        """
        while True:
            s = stream.readline()
            if not s:
                break
            s = s.decode('utf-8').rstrip()
            outbuf.append(s)
            logger.debug('%s: %s' % (name, s))
        stream.close()

    def get_sign_command(self, filename, signer, sign_password, keystore=None): # pragma: no cover
        """
        Return a suitable command for signing a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The signing command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        if sign_password is not None:
            cmd.extend(['--batch', '--passphrase-fd', '0'])
        td = tempfile.mkdtemp()
        sf = os.path.join(td, os.path.basename(filename) + '.asc')
        cmd.extend(['--detach-sign', '--armor', '--local-user',
                    signer, '--output', sf, filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd, sf

    def run_command(self, cmd, input_data=None):
        """
        Run a command in a child process, passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        """
        kwargs = {
            'stdout': subprocess.PIPE,
            'stderr': subprocess.PIPE,
        }
        if input_data is not None:
            kwargs['stdin'] = subprocess.PIPE
        stdout = []
        stderr = []
        p = subprocess.Popen(cmd, **kwargs)
        # We don't use communicate() here because we may need to
        # get clever with interacting with the command
        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
        t1.start()
        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
        t2.start()
        if input_data is not None:
            p.stdin.write(input_data)
            p.stdin.close()

        p.wait()
        t1.join()
        t2.join()
        return p.returncode, stdout, stderr

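    # --- Illustration (not part of the vendored file) ---
    # run_command() drains stdout and stderr on reader threads while the
    # child runs, so neither pipe can fill and deadlock. A sketch, assuming
    # a gpg binary is on PATH:
    #
    #     >>> rc, out, err = index.run_command(['gpg', '--version'])
    #     >>> rc
    #     0
    #     >>> out[0].startswith('gpg')    # first captured stdout line
    #     True
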
    def sign_file(self, filename, signer, sign_password, keystore=None): # pragma: no cover
        """
        Sign a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The absolute pathname of the file where the signature is
                 stored.
        """
        cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
                                              keystore)
        rc, stdout, stderr = self.run_command(cmd,
                                              sign_password.encode('utf-8'))
        if rc != 0:
            raise DistlibException('sign command failed with error '
                                   'code %s' % rc)
        return sig_file

    def upload_file(self, metadata, filename, signer=None, sign_password=None,
                    filetype='sdist', pyversion='source', keystore=None):
        """
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                         distutils command which produced that file, e.g.
                         ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        if not os.path.exists(filename):
            raise DistlibException('not found: %s' % filename)
        metadata.validate()
        d = metadata.todict()
        sig_file = None
        if signer:
            if not self.gpg:
                logger.warning('no signing program available - not signed')
            else:
                sig_file = self.sign_file(filename, signer, sign_password,
                                          keystore)
        with open(filename, 'rb') as f:
            file_data = f.read()
        md5_digest = hashlib.md5(file_data).hexdigest()
        sha256_digest = hashlib.sha256(file_data).hexdigest()
        d.update({
            ':action': 'file_upload',
            'protocol_version': '1',
            'filetype': filetype,
            'pyversion': pyversion,
            'md5_digest': md5_digest,
            'sha256_digest': sha256_digest,
        })
        files = [('content', os.path.basename(filename), file_data)]
        if sig_file:
            with open(sig_file, 'rb') as f:
                sig_data = f.read()
            files.append(('gpg_signature', os.path.basename(sig_file),
                          sig_data))
            shutil.rmtree(os.path.dirname(sig_file))
        request = self.encode_request(d.items(), files)
        return self.send_request(request)

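    # --- Illustration (not part of the vendored file) ---
    # A sketch of a release upload; the metadata object, path and response
    # attribute access are hypothetical and assume credentials were set:
    #
    #     >>> response = index.upload_file(metadata, 'dist/pkg-1.0.tar.gz',
    #     ...                              filetype='sdist',
    #     ...                              pyversion='source')
    #     >>> response.code
    #     200
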
    def upload_documentation(self, metadata, doc_dir): # pragma: no cover
        """
        Upload documentation to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the documentation to be
                         uploaded.
        :param doc_dir: The pathname of the directory which contains the
                        documentation. This should be the directory that
                        contains the ``index.html`` for the documentation.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        if not os.path.isdir(doc_dir):
            raise DistlibException('not a directory: %r' % doc_dir)
        fn = os.path.join(doc_dir, 'index.html')
        if not os.path.exists(fn):
            raise DistlibException('not found: %r' % fn)
        metadata.validate()
        name, version = metadata.name, metadata.version
        zip_data = zip_dir(doc_dir).getvalue()
        fields = [(':action', 'doc_upload'),
                  ('name', name), ('version', version)]
        files = [('content', name, zip_data)]
        request = self.encode_request(fields, files)
        return self.send_request(request)

    def get_verify_command(self, signature_filename, data_filename,
                           keystore=None):
        """
        Return a suitable command for verifying a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The verifying command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        cmd.extend(['--verify', signature_filename, data_filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd

    def verify_signature(self, signature_filename, data_filename,
                         keystore=None):
        """
        Verify a signature for a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: True if the signature was verified, else False.
        """
        if not self.gpg:
            raise DistlibException('verification unavailable because gpg '
                                   'unavailable')
        cmd = self.get_verify_command(signature_filename, data_filename,
                                      keystore)
        rc, stdout, stderr = self.run_command(cmd)
        if rc not in (0, 1):
            raise DistlibException('verify command failed with error code %s' % rc)
        return rc == 0

    def download_file(self, url, destfile, digest=None, reporthook=None):
        """
        This is a convenience method for downloading a file from a URL.
        Normally, this will be a file from the index, though currently
        no check is made for this (i.e. a file can be downloaded from
        anywhere).

        The method is just like the :func:`urlretrieve` function in the
        standard library, except that it allows digest computation to be
        done during download and checking that the downloaded data
        matches any expected value.

        :param url: The URL of the file to be downloaded (assumed to be
                    available via an HTTP GET request).
        :param destfile: The pathname where the downloaded file is to be
                         saved.
        :param digest: If specified, this must be a (hasher, value)
                       tuple, where hasher is the algorithm used (e.g.
                       ``'md5'``) and ``value`` is the expected value.
        :param reporthook: The same as for :func:`urlretrieve` in the
                           standard library.
        """
        if digest is None:
            digester = None
            logger.debug('No digest specified')
        else:
            if isinstance(digest, (list, tuple)):
                hasher, digest = digest
            else:
                hasher = 'md5'
            digester = getattr(hashlib, hasher)()
            logger.debug('Digest specified: %s' % digest)
        # The following code is equivalent to urlretrieve.
        # We need to do it this way so that we can compute the
        # digest of the file as we go.
        with open(destfile, 'wb') as dfp:
            # addinfourl is not a context manager on 2.x
            # so we have to use try/finally
            sfp = self.send_request(Request(url))
            try:
                headers = sfp.info()
                blocksize = 8192
                size = -1
                read = 0
                blocknum = 0
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                if reporthook:
                    reporthook(blocknum, blocksize, size)
                while True:
                    block = sfp.read(blocksize)
                    if not block:
                        break
                    read += len(block)
                    dfp.write(block)
                    if digester:
                        digester.update(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, blocksize, size)
            finally:
                sfp.close()

        # check that we got the whole file, if we can
        if size >= 0 and read < size:
            raise DistlibException(
                'retrieval incomplete: got only %d out of %d bytes'
                % (read, size))
        # if we have a digest, it must match.
        if digester:
            actual = digester.hexdigest()
            if digest != actual:
                raise DistlibException('%s digest mismatch for %s: expected '
                                       '%s, got %s' % (hasher, destfile,
                                                       digest, actual))
            logger.debug('Digest verified: %s', digest)

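    # --- Illustration (not part of the vendored file) ---
    # A sketch of a digest-checked download; the URL and expected value are
    # placeholders. A (hasher, value) tuple selects the algorithm, while a
    # bare string is treated as an MD5 digest:
    #
    #     >>> index.download_file('https://example.com/pkg-1.0.tar.gz',
    #     ...                     'pkg-1.0.tar.gz',
    #     ...                     digest=('sha256', expected_sha256_hex))
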
    def send_request(self, req):
        """
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        """
        handlers = []
        if self.password_handler:
            handlers.append(self.password_handler)
        if self.ssl_verifier:
            handlers.append(self.ssl_verifier)
        opener = build_opener(*handlers)
        return opener.open(req)

    def encode_request(self, fields, files):
        """
        Encode fields and files for posting to an HTTP server.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuples.
        """
        # Adapted from packaging, which in turn was adapted from
        # http://code.activestate.com/recipes/146306

        parts = []
        boundary = self.boundary
        for k, values in fields:
            if not isinstance(values, (list, tuple)):
                values = [values]

            for v in values:
                parts.extend((
                    b'--' + boundary,
                    ('Content-Disposition: form-data; name="%s"' %
                     k).encode('utf-8'),
                    b'',
                    v.encode('utf-8')))
        for key, filename, value in files:
            parts.extend((
                b'--' + boundary,
                ('Content-Disposition: form-data; name="%s"; filename="%s"' %
                 (key, filename)).encode('utf-8'),
                b'',
                value))

        parts.extend((b'--' + boundary + b'--', b''))

        body = b'\r\n'.join(parts)
        ct = b'multipart/form-data; boundary=' + boundary
        headers = {
            'Content-type': ct,
            'Content-length': str(len(body))
        }
        return Request(self.url, body, headers)

    def search(self, terms, operator=None): # pragma: no cover
        if isinstance(terms, string_types):
            terms = {'name': terms}
        rpc_proxy = ServerProxy(self.url, timeout=3.0)
        try:
            return rpc_proxy.search(terms, operator or 'and')
        finally:
            rpc_proxy('close')()
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/manifest.py
ADDED
@@ -0,0 +1,384 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2023 Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
"""
import fnmatch
import logging
import os
import re
import sys

from . import DistlibException
from .compat import fsdecode
from .util import convert_path


__all__ = ['Manifest']

logger = logging.getLogger(__name__)

# a \ followed by some spaces + EOL
_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)

#
# Due to the different results returned by fnmatch.translate, we need
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
# to be brought in for Python 3.6 onwards.
#
_PYTHON_VERSION = sys.version_info[:2]


class Manifest(object):
    """
    A list of files built by exploring the filesystem and filtered by applying various
    patterns to what we find there.
    """

    def __init__(self, base=None):
        """
        Initialise an instance.

        :param base: The base directory to explore under.
        """
        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
        self.prefix = self.base + os.sep
        self.allfiles = None
        self.files = set()

    #
    # Public API
    #

def findall(self):
|
| 60 |
+
"""Find all files under the base and set ``allfiles`` to the absolute
|
| 61 |
+
pathnames of files found.
|
| 62 |
+
"""
|
| 63 |
+
from stat import S_ISREG, S_ISDIR, S_ISLNK
|
| 64 |
+
|
| 65 |
+
self.allfiles = allfiles = []
|
| 66 |
+
root = self.base
|
| 67 |
+
stack = [root]
|
| 68 |
+
pop = stack.pop
|
| 69 |
+
push = stack.append
|
| 70 |
+
|
| 71 |
+
while stack:
|
| 72 |
+
root = pop()
|
| 73 |
+
names = os.listdir(root)
|
| 74 |
+
|
| 75 |
+
for name in names:
|
| 76 |
+
fullname = os.path.join(root, name)
|
| 77 |
+
|
| 78 |
+
# Avoid excess stat calls -- just one will do, thank you!
|
| 79 |
+
stat = os.stat(fullname)
|
| 80 |
+
mode = stat.st_mode
|
| 81 |
+
if S_ISREG(mode):
|
| 82 |
+
allfiles.append(fsdecode(fullname))
|
| 83 |
+
elif S_ISDIR(mode) and not S_ISLNK(mode):
|
| 84 |
+
push(fullname)
|
| 85 |
+
|
| 86 |
+
def add(self, item):
|
| 87 |
+
"""
|
| 88 |
+
Add a file to the manifest.
|
| 89 |
+
|
| 90 |
+
:param item: The pathname to add. This can be relative to the base.
|
| 91 |
+
"""
|
| 92 |
+
if not item.startswith(self.prefix):
|
| 93 |
+
item = os.path.join(self.base, item)
|
| 94 |
+
self.files.add(os.path.normpath(item))
|
| 95 |
+
|
| 96 |
+
def add_many(self, items):
|
| 97 |
+
"""
|
| 98 |
+
Add a list of files to the manifest.
|
| 99 |
+
|
| 100 |
+
:param items: The pathnames to add. These can be relative to the base.
|
| 101 |
+
"""
|
| 102 |
+
for item in items:
|
| 103 |
+
self.add(item)
|
| 104 |
+
|
| 105 |
+
def sorted(self, wantdirs=False):
|
| 106 |
+
"""
|
| 107 |
+
Return sorted files in directory order
|
| 108 |
+
"""
|
| 109 |
+
|
| 110 |
+
def add_dir(dirs, d):
|
| 111 |
+
dirs.add(d)
|
| 112 |
+
logger.debug('add_dir added %s', d)
|
| 113 |
+
if d != self.base:
|
| 114 |
+
parent, _ = os.path.split(d)
|
| 115 |
+
assert parent not in ('', '/')
|
| 116 |
+
add_dir(dirs, parent)
|
| 117 |
+
|
| 118 |
+
result = set(self.files) # make a copy!
|
| 119 |
+
if wantdirs:
|
| 120 |
+
dirs = set()
|
| 121 |
+
for f in result:
|
| 122 |
+
add_dir(dirs, os.path.dirname(f))
|
| 123 |
+
result |= dirs
|
| 124 |
+
return [os.path.join(*path_tuple) for path_tuple in
|
| 125 |
+
sorted(os.path.split(path) for path in result)]
|
| 126 |
+
|
| 127 |
+
def clear(self):
|
| 128 |
+
"""Clear all collected files."""
|
| 129 |
+
self.files = set()
|
| 130 |
+
self.allfiles = []
|
| 131 |
+
|
| 132 |
+
def process_directive(self, directive):
|
| 133 |
+
"""
|
| 134 |
+
Process a directive which either adds some files from ``allfiles`` to
|
| 135 |
+
``files``, or removes some files from ``files``.
|
| 136 |
+
|
| 137 |
+
:param directive: The directive to process. This should be in a format
|
| 138 |
+
compatible with distutils ``MANIFEST.in`` files:
|
| 139 |
+
|
| 140 |
+
http://docs.python.org/distutils/sourcedist.html#commands
|
| 141 |
+
"""
|
| 142 |
+
# Parse the line: split it up, make sure the right number of words
|
| 143 |
+
# is there, and return the relevant words. 'action' is always
|
| 144 |
+
# defined: it's the first word of the line. Which of the other
|
| 145 |
+
# three are defined depends on the action; it'll be either
|
| 146 |
+
# patterns, (dir and patterns), or (dirpattern).
|
| 147 |
+
action, patterns, thedir, dirpattern = self._parse_directive(directive)
|
| 148 |
+
|
| 149 |
+
# OK, now we know that the action is valid and we have the
|
| 150 |
+
# right number of words on the line for that action -- so we
|
| 151 |
+
# can proceed with minimal error-checking.
|
| 152 |
+
if action == 'include':
|
| 153 |
+
for pattern in patterns:
|
| 154 |
+
if not self._include_pattern(pattern, anchor=True):
|
| 155 |
+
logger.warning('no files found matching %r', pattern)
|
| 156 |
+
|
| 157 |
+
elif action == 'exclude':
|
| 158 |
+
for pattern in patterns:
|
| 159 |
+
self._exclude_pattern(pattern, anchor=True)
|
| 160 |
+
|
| 161 |
+
elif action == 'global-include':
|
| 162 |
+
for pattern in patterns:
|
| 163 |
+
if not self._include_pattern(pattern, anchor=False):
|
| 164 |
+
logger.warning('no files found matching %r '
|
| 165 |
+
'anywhere in distribution', pattern)
|
| 166 |
+
|
| 167 |
+
elif action == 'global-exclude':
|
| 168 |
+
for pattern in patterns:
|
| 169 |
+
self._exclude_pattern(pattern, anchor=False)
|
| 170 |
+
|
| 171 |
+
elif action == 'recursive-include':
|
| 172 |
+
for pattern in patterns:
|
| 173 |
+
if not self._include_pattern(pattern, prefix=thedir):
|
| 174 |
+
logger.warning('no files found matching %r '
|
| 175 |
+
'under directory %r', pattern, thedir)
|
| 176 |
+
|
| 177 |
+
elif action == 'recursive-exclude':
|
| 178 |
+
for pattern in patterns:
|
| 179 |
+
self._exclude_pattern(pattern, prefix=thedir)
|
| 180 |
+
|
| 181 |
+
elif action == 'graft':
|
| 182 |
+
if not self._include_pattern(None, prefix=dirpattern):
|
| 183 |
+
logger.warning('no directories found matching %r',
|
| 184 |
+
dirpattern)
|
| 185 |
+
|
| 186 |
+
elif action == 'prune':
|
| 187 |
+
if not self._exclude_pattern(None, prefix=dirpattern):
|
| 188 |
+
logger.warning('no previously-included directories found '
|
| 189 |
+
'matching %r', dirpattern)
|
| 190 |
+
else: # pragma: no cover
|
| 191 |
+
# This should never happen, as it should be caught in
|
| 192 |
+
# _parse_template_line
|
| 193 |
+
raise DistlibException(
|
| 194 |
+
'invalid action %r' % action)
|
| 195 |
+
|
| 196 |
+
#
|
| 197 |
+
# Private API
|
| 198 |
+
#
|
| 199 |
+
|
| 200 |
+
def _parse_directive(self, directive):
|
| 201 |
+
"""
|
| 202 |
+
Validate a directive.
|
| 203 |
+
:param directive: The directive to validate.
|
| 204 |
+
:return: A tuple of action, patterns, thedir, dir_patterns
|
| 205 |
+
"""
|
| 206 |
+
words = directive.split()
|
| 207 |
+
if len(words) == 1 and words[0] not in ('include', 'exclude',
|
| 208 |
+
'global-include',
|
| 209 |
+
'global-exclude',
|
| 210 |
+
'recursive-include',
|
| 211 |
+
'recursive-exclude',
|
| 212 |
+
'graft', 'prune'):
|
| 213 |
+
# no action given, let's use the default 'include'
|
| 214 |
+
words.insert(0, 'include')
|
| 215 |
+
|
| 216 |
+
action = words[0]
|
| 217 |
+
patterns = thedir = dir_pattern = None
|
| 218 |
+
|
| 219 |
+
if action in ('include', 'exclude',
|
| 220 |
+
'global-include', 'global-exclude'):
|
| 221 |
+
if len(words) < 2:
|
| 222 |
+
raise DistlibException(
|
| 223 |
+
'%r expects <pattern1> <pattern2> ...' % action)
|
| 224 |
+
|
| 225 |
+
patterns = [convert_path(word) for word in words[1:]]
|
| 226 |
+
|
| 227 |
+
elif action in ('recursive-include', 'recursive-exclude'):
|
| 228 |
+
if len(words) < 3:
|
| 229 |
+
raise DistlibException(
|
| 230 |
+
'%r expects <dir> <pattern1> <pattern2> ...' % action)
|
| 231 |
+
|
| 232 |
+
thedir = convert_path(words[1])
|
| 233 |
+
patterns = [convert_path(word) for word in words[2:]]
|
| 234 |
+
|
| 235 |
+
elif action in ('graft', 'prune'):
|
| 236 |
+
if len(words) != 2:
|
| 237 |
+
raise DistlibException(
|
| 238 |
+
'%r expects a single <dir_pattern>' % action)
|
| 239 |
+
|
| 240 |
+
dir_pattern = convert_path(words[1])
|
| 241 |
+
|
| 242 |
+
else:
|
| 243 |
+
raise DistlibException('unknown action %r' % action)
|
| 244 |
+
|
| 245 |
+
return action, patterns, thedir, dir_pattern
|
| 246 |
+
|
| 247 |
+
def _include_pattern(self, pattern, anchor=True, prefix=None,
|
| 248 |
+
is_regex=False):
|
| 249 |
+
"""Select strings (presumably filenames) from 'self.files' that
|
| 250 |
+
match 'pattern', a Unix-style wildcard (glob) pattern.
|
| 251 |
+
|
| 252 |
+
Patterns are not quite the same as implemented by the 'fnmatch'
|
| 253 |
+
module: '*' and '?' match non-special characters, where "special"
|
| 254 |
+
is platform-dependent: slash on Unix; colon, slash, and backslash on
|
| 255 |
+
DOS/Windows; and colon on Mac OS.
|
| 256 |
+
|
| 257 |
+
If 'anchor' is true (the default), then the pattern match is more
|
| 258 |
+
stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
|
| 259 |
+
'anchor' is false, both of these will match.
|
| 260 |
+
|
| 261 |
+
If 'prefix' is supplied, then only filenames starting with 'prefix'
|
| 262 |
+
(itself a pattern) and ending with 'pattern', with anything in between
|
| 263 |
+
them, will match. 'anchor' is ignored in this case.
|
| 264 |
+
|
| 265 |
+
If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
|
| 266 |
+
'pattern' is assumed to be either a string containing a regex or a
|
| 267 |
+
regex object -- no translation is done, the regex is just compiled
|
| 268 |
+
and used as-is.
|
| 269 |
+
|
| 270 |
+
Selected strings will be added to self.files.
|
| 271 |
+
|
| 272 |
+
Return True if files are found.
|
| 273 |
+
"""
|
| 274 |
+
# XXX docstring lying about what the special chars are?
|
| 275 |
+
found = False
|
| 276 |
+
pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
|
| 277 |
+
|
| 278 |
+
# delayed loading of allfiles list
|
| 279 |
+
if self.allfiles is None:
|
| 280 |
+
self.findall()
|
| 281 |
+
|
| 282 |
+
for name in self.allfiles:
|
| 283 |
+
if pattern_re.search(name):
|
| 284 |
+
self.files.add(name)
|
| 285 |
+
found = True
|
| 286 |
+
return found
|
| 287 |
+
|
| 288 |
+
def _exclude_pattern(self, pattern, anchor=True, prefix=None,
|
| 289 |
+
is_regex=False):
|
| 290 |
+
"""Remove strings (presumably filenames) from 'files' that match
|
| 291 |
+
'pattern'.
|
| 292 |
+
|
| 293 |
+
Other parameters are the same as for 'include_pattern()', above.
|
| 294 |
+
The list 'self.files' is modified in place. Return True if files are
|
| 295 |
+
found.
|
| 296 |
+
|
| 297 |
+
This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
|
| 298 |
+
packaging source distributions
|
| 299 |
+
"""
|
| 300 |
+
found = False
|
| 301 |
+
pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
|
| 302 |
+
for f in list(self.files):
|
| 303 |
+
if pattern_re.search(f):
|
| 304 |
+
self.files.remove(f)
|
| 305 |
+
found = True
|
| 306 |
+
return found
|
| 307 |
+
|
| 308 |
+
def _translate_pattern(self, pattern, anchor=True, prefix=None,
|
| 309 |
+
is_regex=False):
|
| 310 |
+
"""Translate a shell-like wildcard pattern to a compiled regular
|
| 311 |
+
expression.
|
| 312 |
+
|
| 313 |
+
Return the compiled regex. If 'is_regex' true,
|
| 314 |
+
then 'pattern' is directly compiled to a regex (if it's a string)
|
| 315 |
+
or just returned as-is (assumes it's a regex object).
|
| 316 |
+
"""
|
| 317 |
+
if is_regex:
|
| 318 |
+
if isinstance(pattern, str):
|
| 319 |
+
return re.compile(pattern)
|
| 320 |
+
else:
|
| 321 |
+
return pattern
|
| 322 |
+
|
| 323 |
+
if _PYTHON_VERSION > (3, 2):
|
| 324 |
+
# ditch start and end characters
|
| 325 |
+
start, _, end = self._glob_to_re('_').partition('_')
|
| 326 |
+
|
| 327 |
+
if pattern:
|
| 328 |
+
pattern_re = self._glob_to_re(pattern)
|
| 329 |
+
if _PYTHON_VERSION > (3, 2):
|
| 330 |
+
assert pattern_re.startswith(start) and pattern_re.endswith(end)
|
| 331 |
+
else:
|
| 332 |
+
pattern_re = ''
|
| 333 |
+
|
| 334 |
+
base = re.escape(os.path.join(self.base, ''))
|
| 335 |
+
if prefix is not None:
|
| 336 |
+
# ditch end of pattern character
|
| 337 |
+
if _PYTHON_VERSION <= (3, 2):
|
| 338 |
+
empty_pattern = self._glob_to_re('')
|
| 339 |
+
prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
|
| 340 |
+
else:
|
| 341 |
+
prefix_re = self._glob_to_re(prefix)
|
| 342 |
+
assert prefix_re.startswith(start) and prefix_re.endswith(end)
|
| 343 |
+
prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
|
| 344 |
+
sep = os.sep
|
| 345 |
+
if os.sep == '\\':
|
| 346 |
+
sep = r'\\'
|
| 347 |
+
if _PYTHON_VERSION <= (3, 2):
|
| 348 |
+
pattern_re = '^' + base + sep.join((prefix_re,
|
| 349 |
+
'.*' + pattern_re))
|
| 350 |
+
else:
|
| 351 |
+
pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
|
| 352 |
+
pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
|
| 353 |
+
pattern_re, end)
|
| 354 |
+
else: # no prefix -- respect anchor flag
|
| 355 |
+
if anchor:
|
| 356 |
+
if _PYTHON_VERSION <= (3, 2):
|
| 357 |
+
pattern_re = '^' + base + pattern_re
|
| 358 |
+
else:
|
| 359 |
+
pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
|
| 360 |
+
|
| 361 |
+
return re.compile(pattern_re)
|
| 362 |
+
|
| 363 |
+
def _glob_to_re(self, pattern):
|
| 364 |
+
"""Translate a shell-like glob pattern to a regular expression.
|
| 365 |
+
|
| 366 |
+
Return a string containing the regex. Differs from
|
| 367 |
+
'fnmatch.translate()' in that '*' does not match "special characters"
|
| 368 |
+
(which are platform-specific).
|
| 369 |
+
"""
|
| 370 |
+
pattern_re = fnmatch.translate(pattern)
|
| 371 |
+
|
| 372 |
+
# '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
|
| 373 |
+
# IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
|
| 374 |
+
# and by extension they shouldn't match such "special characters" under
|
| 375 |
+
# any OS. So change all non-escaped dots in the RE to match any
|
| 376 |
+
# character except the special characters (currently: just os.sep).
|
| 377 |
+
sep = os.sep
|
| 378 |
+
if os.sep == '\\':
|
| 379 |
+
# we're using a regex to manipulate a regex, so we need
|
| 380 |
+
# to escape the backslash twice
|
| 381 |
+
sep = r'\\\\'
|
| 382 |
+
escaped = r'\1[^%s]' % sep
|
| 383 |
+
pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
|
| 384 |
+
return pattern_re
|
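To make the directive processing above concrete, here is a minimal usage sketch (not part of the vendored file). The directory layout and patterns are invented for illustration:

# Hedged usage sketch: build a Manifest over the current directory and
# apply a few MANIFEST.in-style directives. Paths/patterns are examples.
from pip._vendor.distlib.manifest import Manifest

m = Manifest('.')                                    # base defaults to os.getcwd()
m.process_directive('include *.py')                  # anchored at the base
m.process_directive('recursive-include docs *.rst')  # pattern under a directory
m.process_directive('global-exclude *.pyc')          # drop matches anywhere
for path in m.sorted():
    print(path)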
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/markers.py
ADDED
@@ -0,0 +1,162 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2023 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Parser for the environment markers micro-language defined in PEP 508.
"""

# Note: In PEP 345, the micro-language was Python compatible, so the ast
# module could be used to parse it. However, PEP 508 introduced operators such
# as ~= and === which aren't in Python, necessitating a different approach.

import os
import re
import sys
import platform

from .compat import string_types
from .util import in_venv, parse_marker
from .version import LegacyVersion as LV

__all__ = ['interpret']

_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")')
_VERSION_MARKERS = {'python_version', 'python_full_version'}


def _is_version_marker(s):
    return isinstance(s, string_types) and s in _VERSION_MARKERS


def _is_literal(o):
    if not isinstance(o, string_types) or not o:
        return False
    return o[0] in '\'"'


def _get_versions(s):
    return {LV(m.groups()[0]) for m in _VERSION_PATTERN.finditer(s)}


class Evaluator(object):
    """
    This class is used to evaluate marker expressions.
    """

    operations = {
        '==': lambda x, y: x == y,
        '===': lambda x, y: x == y,
        '~=': lambda x, y: x == y or x > y,
        '!=': lambda x, y: x != y,
        '<': lambda x, y: x < y,
        '<=': lambda x, y: x == y or x < y,
        '>': lambda x, y: x > y,
        '>=': lambda x, y: x == y or x > y,
        'and': lambda x, y: x and y,
        'or': lambda x, y: x or y,
        'in': lambda x, y: x in y,
        'not in': lambda x, y: x not in y,
    }

    def evaluate(self, expr, context):
        """
        Evaluate a marker expression returned by the :func:`parse_requirement`
        function in the specified context.
        """
        if isinstance(expr, string_types):
            if expr[0] in '\'"':
                result = expr[1:-1]
            else:
                if expr not in context:
                    raise SyntaxError('unknown variable: %s' % expr)
                result = context[expr]
        else:
            assert isinstance(expr, dict)
            op = expr['op']
            if op not in self.operations:
                raise NotImplementedError('op not implemented: %s' % op)
            elhs = expr['lhs']
            erhs = expr['rhs']
            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
                raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))

            lhs = self.evaluate(elhs, context)
            rhs = self.evaluate(erhs, context)
            if ((_is_version_marker(elhs) or _is_version_marker(erhs)) and
                    op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
                lhs = LV(lhs)
                rhs = LV(rhs)
            elif _is_version_marker(elhs) and op in ('in', 'not in'):
                lhs = LV(lhs)
                rhs = _get_versions(rhs)
            result = self.operations[op](lhs, rhs)
        return result


_DIGITS = re.compile(r'\d+\.\d+')


def default_context():

    def format_full_version(info):
        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
        kind = info.releaselevel
        if kind != 'final':
            version += kind[0] + str(info.serial)
        return version

    if hasattr(sys, 'implementation'):
        implementation_version = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        implementation_version = '0'
        implementation_name = ''

    ppv = platform.python_version()
    m = _DIGITS.match(ppv)
    pv = m.group(0)
    result = {
        'implementation_name': implementation_name,
        'implementation_version': implementation_version,
        'os_name': os.name,
        'platform_machine': platform.machine(),
        'platform_python_implementation': platform.python_implementation(),
        'platform_release': platform.release(),
        'platform_system': platform.system(),
        'platform_version': platform.version(),
        'platform_in_venv': str(in_venv()),
        'python_full_version': ppv,
        'python_version': pv,
        'sys_platform': sys.platform,
    }
    return result


DEFAULT_CONTEXT = default_context()
del default_context

evaluator = Evaluator()


def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    try:
        expr, rest = parse_marker(marker)
    except Exception as e:
        raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
    if rest and rest[0] != '#':
        raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
    context = dict(DEFAULT_CONTEXT)
    if execution_context:
        context.update(execution_context)
    return evaluator.evaluate(expr, context)
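A minimal sketch of how interpret() above is typically driven (not part of the vendored file); the marker strings are examples only, and results depend on the environment the sketch runs in:

# Hedged usage sketch for the PEP 508 marker evaluator above.
from pip._vendor.distlib.markers import interpret

# Evaluated against DEFAULT_CONTEXT, i.e. the running interpreter.
print(interpret('python_version >= "3.8"'))

# Names not in the default context (like 'extra') can be supplied by the caller.
print(interpret('os_name == "posix" and extra == "test"',
                execution_context={'extra': 'test'}))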
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/metadata.py
ADDED
|
@@ -0,0 +1,1031 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Implementation of the Metadata for Python packages PEPs.

Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.2).
"""
from __future__ import unicode_literals

import codecs
from email import message_from_file
import json
import logging
import re

from . import DistlibException, __version__
from .compat import StringIO, string_types, text_type
from .markers import interpret
from .util import extract_by_key, get_extras
from .version import get_scheme, PEP440_VERSION_RE

logger = logging.getLogger(__name__)


class MetadataMissingError(DistlibException):
    """A required metadata is missing"""


class MetadataConflictError(DistlibException):
    """Attempt to read or write metadata fields that are conflictual."""


class MetadataUnrecognizedVersionError(DistlibException):
    """Unknown metadata version number."""


class MetadataInvalidError(DistlibException):
    """A metadata value is invalid"""


# public API of this module
__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']

# Encoding used for the PKG-INFO files
PKG_INFO_ENCODING = 'utf-8'

# preferred version. Hopefully will be changed
# to 1.2 once PEP 345 is supported everywhere
PKG_INFO_PREFERRED_VERSION = '1.1'

_LINE_PREFIX_1_2 = re.compile('\n       \\|')
_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')
_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Summary', 'Description', 'Keywords', 'Home-page',
               'Author', 'Author-email', 'License')

_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email', 'License', 'Classifier', 'Download-URL', 'Obsoletes',
               'Provides', 'Requires')

_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', 'Download-URL')

_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External')

_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', 'Obsoletes-Dist', 'Requires-External',
                'Maintainer', 'Maintainer-email', 'Project-URL')

_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist', 'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External', 'Private-Version', 'Obsoleted-By', 'Setup-Requires-Dist',
               'Extension', 'Provides-Extra')

_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension')

# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
# the metadata. Include them in the tuple literal below to allow them
# (for now).
# Ditto for Obsoletes - see issue #140.
_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires', 'Provides', 'Obsoletes')

_566_MARKERS = ('Description-Content-Type', )

_643_MARKERS = ('Dynamic', 'License-File')

_643_FIELDS = _566_FIELDS + _643_MARKERS

_ALL_FIELDS = set()
_ALL_FIELDS.update(_241_FIELDS)
_ALL_FIELDS.update(_314_FIELDS)
_ALL_FIELDS.update(_345_FIELDS)
_ALL_FIELDS.update(_426_FIELDS)
_ALL_FIELDS.update(_566_FIELDS)
_ALL_FIELDS.update(_643_FIELDS)

EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')


def _version2fieldlist(version):
    if version == '1.0':
        return _241_FIELDS
    elif version == '1.1':
        return _314_FIELDS
    elif version == '1.2':
        return _345_FIELDS
    elif version in ('1.3', '2.1'):
        # avoid adding field names if already there
        return _345_FIELDS + tuple(f for f in _566_FIELDS if f not in _345_FIELDS)
    elif version == '2.0':
        raise ValueError('Metadata 2.0 is withdrawn and not supported')
        # return _426_FIELDS
    elif version == '2.2':
        return _643_FIELDS
    raise MetadataUnrecognizedVersionError(version)


def _best_version(fields):
    """Detect the best version depending on the fields used."""

    def _has_marker(keys, markers):
        return any(marker in keys for marker in markers)

    keys = [key for key, value in fields.items() if value not in ([], 'UNKNOWN', None)]
    possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2']  # 2.0 removed

    # first let's try to see if a field is not part of one of the version
    for key in keys:
        if key not in _241_FIELDS and '1.0' in possible_versions:
            possible_versions.remove('1.0')
            logger.debug('Removed 1.0 due to %s', key)
        if key not in _314_FIELDS and '1.1' in possible_versions:
            possible_versions.remove('1.1')
            logger.debug('Removed 1.1 due to %s', key)
        if key not in _345_FIELDS and '1.2' in possible_versions:
            possible_versions.remove('1.2')
            logger.debug('Removed 1.2 due to %s', key)
        if key not in _566_FIELDS and '1.3' in possible_versions:
            possible_versions.remove('1.3')
            logger.debug('Removed 1.3 due to %s', key)
        if key not in _566_FIELDS and '2.1' in possible_versions:
            if key != 'Description':  # In 2.1, description allowed after headers
                possible_versions.remove('2.1')
                logger.debug('Removed 2.1 due to %s', key)
        if key not in _643_FIELDS and '2.2' in possible_versions:
            possible_versions.remove('2.2')
            logger.debug('Removed 2.2 due to %s', key)
        # if key not in _426_FIELDS and '2.0' in possible_versions:
        #     possible_versions.remove('2.0')
        #     logger.debug('Removed 2.0 due to %s', key)

    # possible_version contains qualified versions
    if len(possible_versions) == 1:
        return possible_versions[0]  # found !
    elif len(possible_versions) == 0:
        logger.debug('Out of options - unknown metadata set: %s', fields)
        raise MetadataConflictError('Unknown metadata set')

    # let's see if one unique marker is found
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
    # is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
    is_2_2 = '2.2' in possible_versions and _has_marker(keys, _643_MARKERS)
    if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_2) > 1:
        raise MetadataConflictError('You used incompatible 1.1/1.2/2.1/2.2 fields')

    # we have the choice, 1.0, or 1.2, 2.1 or 2.2
    #   - 1.0 has a broken Summary field but works with all tools
    #   - 1.1 is to avoid
    #   - 1.2 fixes Summary but has little adoption
    #   - 2.1 adds more features
    #   - 2.2 is the latest
    if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_2:
        # we couldn't find any specific marker
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'
    if is_1_2:
        return '1.2'
    if is_2_1:
        return '2.1'
    # if is_2_2:
    #     return '2.2'

    return '2.2'


# This follows the rules about transforming keys as described in
# https://www.python.org/dev/peps/pep-0566/#id17
_ATTR2FIELD = {name.lower().replace("-", "_"): name for name in _ALL_FIELDS}
_FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()}

_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
_VERSIONS_FIELDS = ('Requires-Python', )
_VERSION_FIELDS = ('Version', )
_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', 'Requires', 'Provides', 'Obsoletes-Dist', 'Provides-Dist',
               'Requires-Dist', 'Requires-External', 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
               'Provides-Extra', 'Extension', 'License-File')
_LISTTUPLEFIELDS = ('Project-URL', )

_ELEMENTSFIELD = ('Keywords', )

_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')

_MISSING = object()

_FILESAFE = re.compile('[^A-Za-z0-9.]+')


def _get_name_and_version(name, version, for_filename=False):
    """Return the distribution name with version.

    If for_filename is true, return a filename-escaped form."""
    if for_filename:
        # For both name and version any runs of non-alphanumeric or '.'
        # characters are replaced with a single '-'.  Additionally any
        # spaces in the version string become '.'
        name = _FILESAFE.sub('-', name)
        version = _FILESAFE.sub('-', version.replace(' ', '.'))
    return '%s-%s' % (name, version)


class LegacyMetadata(object):
    """The legacy metadata of a release.

    Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a metadata file
    - *fileobj* give a file-like object with metadata as content
    - *mapping* is a dict-like object
    - *scheme* is a version scheme name
    """

    # TODO document the mapping API and UNKNOWN default key

    def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'):
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._fields = {}
        self.requires_files = []
        self._dependencies = None
        self.scheme = scheme
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)
            self.set_metadata_version()

    def set_metadata_version(self):
        self._fields['Metadata-Version'] = _best_version(self._fields)

    def _write_field(self, fileobj, name, value):
        fileobj.write('%s: %s\n' % (name, value))

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __delitem__(self, name):
        field_name = self._convert_name(name)
        try:
            del self._fields[field_name]
        except KeyError:
            raise KeyError(name)

    def __contains__(self, name):
        return (name in self._fields or self._convert_name(name) in self._fields)

    def _convert_name(self, name):
        if name in _ALL_FIELDS:
            return name
        name = name.replace('-', '_').lower()
        return _ATTR2FIELD.get(name, name)

    def _default_value(self, name):
        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
            return []
        return 'UNKNOWN'

    def _remove_line_prefix(self, value):
        if self.metadata_version in ('1.0', '1.1'):
            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
        else:
            return _LINE_PREFIX_1_2.sub('\n', value)

    def __getattr__(self, name):
        if name in _ATTR2FIELD:
            return self[name]
        raise AttributeError(name)

    #
    # Public API
    #

    def get_fullname(self, filesafe=False):
        """
        Return the distribution name with version.

        If filesafe is true, return a filename-escaped form.
        """
        return _get_name_and_version(self['Name'], self['Version'], filesafe)

    def is_field(self, name):
        """return True if name is a valid metadata key"""
        name = self._convert_name(name)
        return name in _ALL_FIELDS

    def is_multi_field(self, name):
        name = self._convert_name(name)
        return name in _LISTFIELDS

    def read(self, filepath):
        """Read the metadata values from a file path."""
        fp = codecs.open(filepath, 'r', encoding='utf-8')
        try:
            self.read_file(fp)
        finally:
            fp.close()

    def read_file(self, fileob):
        """Read the metadata values from a file object."""
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        # When reading, get all the fields we can
        for field in _ALL_FIELDS:
            if field not in msg:
                continue
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                if field in _LISTTUPLEFIELDS and values is not None:
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)

        # PEP 566 specifies that the body be used for the description, if
        # available
        body = msg.get_payload()
        self["Description"] = body if body else self["Description"]
        # logger.debug('Attempting to set metadata for %s', self)
        # self.set_metadata_version()

    def write(self, filepath, skip_unknown=False):
        """Write the metadata fields to filepath."""
        fp = codecs.open(filepath, 'w', encoding='utf-8')
        try:
            self.write_file(fp, skip_unknown)
        finally:
            fp.close()

    def write_file(self, fileobject, skip_unknown=False):
        """Write the PKG-INFO format data to a file object."""
        self.set_metadata_version()

        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
                continue
            if field in _ELEMENTSFIELD:
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    if self.metadata_version in ('1.0', '1.1'):
                        values = values.replace('\n', '\n        ')
                    else:
                        values = values.replace('\n', '\n       |')
                values = [values]

            if field in _LISTTUPLEFIELDS:
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)

    def update(self, other=None, **kwargs):
        """Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        """

        def _set(key, value):
            if key in _ATTR2FIELD and value:
                self.set(self._convert_name(key), value)

        if not other:
            # other is None or empty container
            pass
        elif hasattr(other, 'keys'):
            for k in other.keys():
                _set(k, other[k])
        else:
            for k, v in other:
                _set(k, v)

        if kwargs:
            for k, v in kwargs.items():
                _set(k, v)

    def set(self, name, value):
        """Control then set a metadata field."""
        name = self._convert_name(name)

        if ((name in _ELEMENTSFIELD or name == 'Platform') and not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [v.strip() for v in value.split(',')]
            else:
                value = []
        elif (name in _LISTFIELDS and not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [value]
            else:
                value = []

        if logger.isEnabledFor(logging.WARNING):
            project_name = self['Name']

            scheme = get_scheme(self.scheme)
            if name in _PREDICATE_FIELDS and value is not None:
                for v in value:
                    # check that the values are valid
                    if not scheme.is_valid_matcher(v.split(';')[0]):
                        logger.warning("'%s': '%s' is not valid (field '%s')", project_name, v, name)
            # FIXME this rejects UNKNOWN, is that right?
            elif name in _VERSIONS_FIELDS and value is not None:
                if not scheme.is_valid_constraint_list(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name)
            elif name in _VERSION_FIELDS and value is not None:
                if not scheme.is_valid_version(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')", project_name, value, name)

        if name in _UNICODEFIELDS:
            if name == 'Description':
                value = self._remove_line_prefix(value)

        self._fields[name] = value

    def get(self, name, default=_MISSING):
        """Get a metadata field."""
        name = self._convert_name(name)
        if name not in self._fields:
            if default is _MISSING:
                default = self._default_value(name)
            return default
        if name in _UNICODEFIELDS:
            value = self._fields[name]
            return value
        elif name in _LISTFIELDS:
            value = self._fields[name]
            if value is None:
                return []
            res = []
            for val in value:
                if name not in _LISTTUPLEFIELDS:
                    res.append(val)
                else:
                    # That's for Project-URL
                    res.append((val[0], val[1]))
            return res

        elif name in _ELEMENTSFIELD:
            value = self._fields[name]
            if isinstance(value, string_types):
                return value.split(',')
        return self._fields[name]

    def check(self, strict=False):
        """Check if the metadata is compliant. If strict is True then raise if
        no Name or Version are provided"""
        self.set_metadata_version()

        # XXX should check the versions (if the file was loaded)
        missing, warnings = [], []

        for attr in ('Name', 'Version'):  # required by PEP 345
            if attr not in self:
                missing.append(attr)

        if strict and missing != []:
            msg = 'missing required metadata: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)

        for attr in ('Home-page', 'Author'):
            if attr not in self:
                missing.append(attr)

        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
        if self['Metadata-Version'] != '1.2':
            return missing, warnings

        scheme = get_scheme(self.scheme)

        def are_valid_constraints(value):
            for v in value:
                if not scheme.is_valid_matcher(v.split(';')[0]):
                    return False
            return True

        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
                                   (_VERSIONS_FIELDS, scheme.is_valid_constraint_list),
                                   (_VERSION_FIELDS, scheme.is_valid_version)):
            for field in fields:
                value = self.get(field, None)
                if value is not None and not controller(value):
                    warnings.append("Wrong value for '%s': %s" % (field, value))

        return missing, warnings

    def todict(self, skip_missing=False):
        """Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
        This is as per https://www.python.org/dev/peps/pep-0566/#id17.
        """
        self.set_metadata_version()

        fields = _version2fieldlist(self['Metadata-Version'])

        data = {}

        for field_name in fields:
            if not skip_missing or field_name in self._fields:
                key = _FIELD2ATTR[field_name]
                if key != 'project_url':
                    data[key] = self[field_name]
                else:
                    data[key] = [','.join(u) for u in self[field_name]]

        return data

    def add_requirements(self, requirements):
        if self['Metadata-Version'] == '1.1':
            # we can't have 1.1 metadata *and* Setuptools requires
            for field in ('Obsoletes', 'Requires', 'Provides'):
                if field in self:
                    del self[field]
        self['Requires-Dist'] += requirements

    # Mapping API
    # TODO could add iter* variants

    def keys(self):
        return list(_version2fieldlist(self['Metadata-Version']))

    def __iter__(self):
        for key in self.keys():
            yield key

    def values(self):
        return [self[key] for key in self.keys()]

    def items(self):
        return [(key, self[key]) for key in self.keys()]

    def __repr__(self):
        return '<%s %s %s>' % (self.__class__.__name__, self.name, self.version)


METADATA_FILENAME = 'pydist.json'
WHEEL_METADATA_FILENAME = 'metadata.json'
LEGACY_METADATA_FILENAME = 'METADATA'


class Metadata(object):
    """
    The metadata of a release. This implementation uses 2.1
    metadata where possible. If not possible, it wraps a LegacyMetadata
    instance which handles the key-value metadata format.
    """

    METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')

    NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)

    FIELDNAME_MATCHER = re.compile('^[A-Z]([0-9A-Z-]*[0-9A-Z])?$', re.I)

    VERSION_MATCHER = PEP440_VERSION_RE

    SUMMARY_MATCHER = re.compile('.{1,2047}')

    METADATA_VERSION = '2.0'

    GENERATOR = 'distlib (%s)' % __version__

    MANDATORY_KEYS = {
        'name': (),
        'version': (),
        'summary': ('legacy', ),
    }

    INDEX_KEYS = ('name version license summary description author '
                  'author_email keywords platform home_page classifiers '
                  'download_url')

    DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
                       'dev_requires provides meta_requires obsoleted_by '
                       'supports_environments')

    SYNTAX_VALIDATORS = {
        'metadata_version': (METADATA_VERSION_MATCHER, ()),
        'name': (NAME_MATCHER, ('legacy', )),
        'version': (VERSION_MATCHER, ('legacy', )),
        'summary': (SUMMARY_MATCHER, ('legacy', )),
        'dynamic': (FIELDNAME_MATCHER, ('legacy', )),
    }

    __slots__ = ('_legacy', '_data', 'scheme')

    def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'):
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._legacy = None
        self._data = None
        self.scheme = scheme
        # import pdb; pdb.set_trace()
        if mapping is not None:
            try:
                self._validate_mapping(mapping, scheme)
                self._data = mapping
            except MetadataUnrecognizedVersionError:
                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
                self.validate()
        else:
            data = None
            if path:
                with open(path, 'rb') as f:
                    data = f.read()
            elif fileobj:
                data = fileobj.read()
            if data is None:
                # Initialised with no args - to be added
                self._data = {
                    'metadata_version': self.METADATA_VERSION,
                    'generator': self.GENERATOR,
                }
            else:
                if not isinstance(data, text_type):
                    data = data.decode('utf-8')
                try:
                    self._data = json.loads(data)
                    self._validate_mapping(self._data, scheme)
                except ValueError:
                    # Note: MetadataUnrecognizedVersionError does not
                    # inherit from ValueError (it's a DistlibException,
                    # which should not inherit from ValueError).
                    # The ValueError comes from the json.load - if that
                    # succeeds and we get a validation error, we want
                    # that to propagate
                    self._legacy = LegacyMetadata(fileobj=StringIO(data), scheme=scheme)
                    self.validate()

    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))

    none_list = (None, list)
    none_dict = (None, dict)

    mapped_keys = {
        'run_requires': ('Requires-Dist', list),
        'build_requires': ('Setup-Requires-Dist', list),
        'dev_requires': none_list,
        'test_requires': none_list,
        'meta_requires': none_list,
        'extras': ('Provides-Extra', list),
        'modules': none_list,
        'namespaces': none_list,
        'exports': none_dict,
        'commands': none_dict,
        'classifiers': ('Classifier', list),
        'source_url': ('Download-URL', None),
        'metadata_version': ('Metadata-Version', None),
    }

    del none_list, none_dict

    def __getattribute__(self, key):
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, maker = mapped[key]
            if self._legacy:
                if lk is None:
                    result = None if maker is None else maker()
                else:
                    result = self._legacy.get(lk)
            else:
                value = None if maker is None else maker()
                if key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'):
                    result = self._data.get(key, value)
                else:
                    # special cases for PEP 459
                    sentinel = object()
                    result = sentinel
                    d = self._data.get('extensions')
                    if d:
                        if key == 'commands':
                            result = d.get('python.commands', value)
                        elif key == 'classifiers':
                            d = d.get('python.details')
                            if d:
                                result = d.get(key, value)
                        else:
                            d = d.get('python.exports')
                            if not d:
                                d = self._data.get('python.exports')
                            if d:
                                result = d.get(key, value)
                    if result is sentinel:
                        result = value
        elif key not in common:
            result = object.__getattribute__(self, key)
        elif self._legacy:
            result = self._legacy.get(key)
        else:
            result = self._data.get(key)
        return result

    def _validate_value(self, key, value, scheme=None):
        if key in self.SYNTAX_VALIDATORS:
            pattern, exclusions = self.SYNTAX_VALIDATORS[key]
            if (scheme or self.scheme) not in exclusions:
                m = pattern.match(value)
                if not m:
                    raise MetadataInvalidError("'%s' is an invalid value for "
                                               "the '%s' property" % (value, key))

    def __setattr__(self, key, value):
        self._validate_value(key, value)
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, _ = mapped[key]
            if self._legacy:
                if lk is None:
                    raise NotImplementedError
                self._legacy[lk] = value
            elif key not in ('commands', 'exports', 'modules', 'namespaces', 'classifiers'):
                self._data[key] = value
            else:
                # special cases for PEP 459
                d = self._data.setdefault('extensions', {})
                if key == 'commands':
                    d['python.commands'] = value
                elif key == 'classifiers':
                    d = d.setdefault('python.details', {})
                    d[key] = value
                else:
                    d = d.setdefault('python.exports', {})
                    d[key] = value
        elif key not in common:
            object.__setattr__(self, key, value)
        else:
            if key == 'keywords':
                if isinstance(value, string_types):
                    value = value.strip()
                    if value:
                        value = value.split()
                    else:
                        value = []
            if self._legacy:
                self._legacy[key] = value
            else:
                self._data[key] = value

    @property
    def name_and_version(self):
        return _get_name_and_version(self.name, self.version, True)

    @property
    def provides(self):
|
| 790 |
+
if self._legacy:
|
| 791 |
+
result = self._legacy['Provides-Dist']
|
| 792 |
+
else:
|
| 793 |
+
result = self._data.setdefault('provides', [])
|
| 794 |
+
s = '%s (%s)' % (self.name, self.version)
|
| 795 |
+
if s not in result:
|
| 796 |
+
result.append(s)
|
| 797 |
+
return result
|
| 798 |
+
|
| 799 |
+
@provides.setter
|
| 800 |
+
def provides(self, value):
|
| 801 |
+
if self._legacy:
|
| 802 |
+
self._legacy['Provides-Dist'] = value
|
| 803 |
+
else:
|
| 804 |
+
self._data['provides'] = value
|
| 805 |
+
|
| 806 |
+
def get_requirements(self, reqts, extras=None, env=None):
|
| 807 |
+
"""
|
| 808 |
+
Base method to get dependencies, given a set of extras
|
| 809 |
+
to satisfy and an optional environment context.
|
| 810 |
+
:param reqts: A list of sometimes-wanted dependencies,
|
| 811 |
+
perhaps dependent on extras and environment.
|
| 812 |
+
:param extras: A list of optional components being requested.
|
| 813 |
+
:param env: An optional environment for marker evaluation.
|
| 814 |
+
"""
|
| 815 |
+
if self._legacy:
|
| 816 |
+
result = reqts
|
| 817 |
+
else:
|
| 818 |
+
result = []
|
| 819 |
+
extras = get_extras(extras or [], self.extras)
|
| 820 |
+
for d in reqts:
|
| 821 |
+
if 'extra' not in d and 'environment' not in d:
|
| 822 |
+
# unconditional
|
| 823 |
+
include = True
|
| 824 |
+
else:
|
| 825 |
+
if 'extra' not in d:
|
| 826 |
+
# Not extra-dependent - only environment-dependent
|
| 827 |
+
include = True
|
| 828 |
+
else:
|
| 829 |
+
include = d.get('extra') in extras
|
| 830 |
+
if include:
|
| 831 |
+
# Not excluded because of extras, check environment
|
| 832 |
+
marker = d.get('environment')
|
| 833 |
+
if marker:
|
| 834 |
+
include = interpret(marker, env)
|
| 835 |
+
if include:
|
| 836 |
+
result.extend(d['requires'])
|
| 837 |
+
for key in ('build', 'dev', 'test'):
|
| 838 |
+
e = ':%s:' % key
|
| 839 |
+
if e in extras:
|
| 840 |
+
extras.remove(e)
|
| 841 |
+
# A recursive call, but it should terminate since 'test'
|
| 842 |
+
# has been removed from the extras
|
| 843 |
+
reqts = self._data.get('%s_requires' % key, [])
|
| 844 |
+
result.extend(self.get_requirements(reqts, extras=extras, env=env))
|
| 845 |
+
return result
|
| 846 |
+
|
| 847 |
+
@property
|
| 848 |
+
def dictionary(self):
|
| 849 |
+
if self._legacy:
|
| 850 |
+
return self._from_legacy()
|
| 851 |
+
return self._data
|
| 852 |
+
|
| 853 |
+
@property
|
| 854 |
+
def dependencies(self):
|
| 855 |
+
if self._legacy:
|
| 856 |
+
raise NotImplementedError
|
| 857 |
+
else:
|
| 858 |
+
return extract_by_key(self._data, self.DEPENDENCY_KEYS)
|
| 859 |
+
|
| 860 |
+
@dependencies.setter
|
| 861 |
+
def dependencies(self, value):
|
| 862 |
+
if self._legacy:
|
| 863 |
+
raise NotImplementedError
|
| 864 |
+
else:
|
| 865 |
+
self._data.update(value)
|
| 866 |
+
|
| 867 |
+
def _validate_mapping(self, mapping, scheme):
|
| 868 |
+
if mapping.get('metadata_version') != self.METADATA_VERSION:
|
| 869 |
+
raise MetadataUnrecognizedVersionError()
|
| 870 |
+
missing = []
|
| 871 |
+
for key, exclusions in self.MANDATORY_KEYS.items():
|
| 872 |
+
if key not in mapping:
|
| 873 |
+
if scheme not in exclusions:
|
| 874 |
+
missing.append(key)
|
| 875 |
+
if missing:
|
| 876 |
+
msg = 'Missing metadata items: %s' % ', '.join(missing)
|
| 877 |
+
raise MetadataMissingError(msg)
|
| 878 |
+
for k, v in mapping.items():
|
| 879 |
+
self._validate_value(k, v, scheme)
|
| 880 |
+
|
| 881 |
+
def validate(self):
|
| 882 |
+
if self._legacy:
|
| 883 |
+
missing, warnings = self._legacy.check(True)
|
| 884 |
+
if missing or warnings:
|
| 885 |
+
logger.warning('Metadata: missing: %s, warnings: %s', missing, warnings)
|
| 886 |
+
else:
|
| 887 |
+
self._validate_mapping(self._data, self.scheme)
|
| 888 |
+
|
| 889 |
+
def todict(self):
|
| 890 |
+
if self._legacy:
|
| 891 |
+
return self._legacy.todict(True)
|
| 892 |
+
else:
|
| 893 |
+
result = extract_by_key(self._data, self.INDEX_KEYS)
|
| 894 |
+
return result
|
| 895 |
+
|
| 896 |
+
def _from_legacy(self):
|
| 897 |
+
assert self._legacy and not self._data
|
| 898 |
+
result = {
|
| 899 |
+
'metadata_version': self.METADATA_VERSION,
|
| 900 |
+
'generator': self.GENERATOR,
|
| 901 |
+
}
|
| 902 |
+
lmd = self._legacy.todict(True) # skip missing ones
|
| 903 |
+
for k in ('name', 'version', 'license', 'summary', 'description', 'classifier'):
|
| 904 |
+
if k in lmd:
|
| 905 |
+
if k == 'classifier':
|
| 906 |
+
nk = 'classifiers'
|
| 907 |
+
else:
|
| 908 |
+
nk = k
|
| 909 |
+
result[nk] = lmd[k]
|
| 910 |
+
kw = lmd.get('Keywords', [])
|
| 911 |
+
if kw == ['']:
|
| 912 |
+
kw = []
|
| 913 |
+
result['keywords'] = kw
|
| 914 |
+
keys = (('requires_dist', 'run_requires'), ('setup_requires_dist', 'build_requires'))
|
| 915 |
+
for ok, nk in keys:
|
| 916 |
+
if ok in lmd and lmd[ok]:
|
| 917 |
+
result[nk] = [{'requires': lmd[ok]}]
|
| 918 |
+
result['provides'] = self.provides
|
| 919 |
+
# author = {}
|
| 920 |
+
# maintainer = {}
|
| 921 |
+
return result
|
| 922 |
+
|
| 923 |
+
LEGACY_MAPPING = {
|
| 924 |
+
'name': 'Name',
|
| 925 |
+
'version': 'Version',
|
| 926 |
+
('extensions', 'python.details', 'license'): 'License',
|
| 927 |
+
'summary': 'Summary',
|
| 928 |
+
'description': 'Description',
|
| 929 |
+
('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page',
|
| 930 |
+
('extensions', 'python.project', 'contacts', 0, 'name'): 'Author',
|
| 931 |
+
('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email',
|
| 932 |
+
'source_url': 'Download-URL',
|
| 933 |
+
('extensions', 'python.details', 'classifiers'): 'Classifier',
|
| 934 |
+
}
|
| 935 |
+
|
| 936 |
+
def _to_legacy(self):
|
| 937 |
+
|
| 938 |
+
def process_entries(entries):
|
| 939 |
+
reqts = set()
|
| 940 |
+
for e in entries:
|
| 941 |
+
extra = e.get('extra')
|
| 942 |
+
env = e.get('environment')
|
| 943 |
+
rlist = e['requires']
|
| 944 |
+
for r in rlist:
|
| 945 |
+
if not env and not extra:
|
| 946 |
+
reqts.add(r)
|
| 947 |
+
else:
|
| 948 |
+
marker = ''
|
| 949 |
+
if extra:
|
| 950 |
+
marker = 'extra == "%s"' % extra
|
| 951 |
+
if env:
|
| 952 |
+
if marker:
|
| 953 |
+
marker = '(%s) and %s' % (env, marker)
|
| 954 |
+
else:
|
| 955 |
+
marker = env
|
| 956 |
+
reqts.add(';'.join((r, marker)))
|
| 957 |
+
return reqts
|
| 958 |
+
|
| 959 |
+
assert self._data and not self._legacy
|
| 960 |
+
result = LegacyMetadata()
|
| 961 |
+
nmd = self._data
|
| 962 |
+
# import pdb; pdb.set_trace()
|
| 963 |
+
for nk, ok in self.LEGACY_MAPPING.items():
|
| 964 |
+
if not isinstance(nk, tuple):
|
| 965 |
+
if nk in nmd:
|
| 966 |
+
result[ok] = nmd[nk]
|
| 967 |
+
else:
|
| 968 |
+
d = nmd
|
| 969 |
+
found = True
|
| 970 |
+
for k in nk:
|
| 971 |
+
try:
|
| 972 |
+
d = d[k]
|
| 973 |
+
except (KeyError, IndexError):
|
| 974 |
+
found = False
|
| 975 |
+
break
|
| 976 |
+
if found:
|
| 977 |
+
result[ok] = d
|
| 978 |
+
r1 = process_entries(self.run_requires + self.meta_requires)
|
| 979 |
+
r2 = process_entries(self.build_requires + self.dev_requires)
|
| 980 |
+
if self.extras:
|
| 981 |
+
result['Provides-Extra'] = sorted(self.extras)
|
| 982 |
+
result['Requires-Dist'] = sorted(r1)
|
| 983 |
+
result['Setup-Requires-Dist'] = sorted(r2)
|
| 984 |
+
# TODO: any other fields wanted
|
| 985 |
+
return result
|
| 986 |
+
|
| 987 |
+
def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
|
| 988 |
+
if [path, fileobj].count(None) != 1:
|
| 989 |
+
raise ValueError('Exactly one of path and fileobj is needed')
|
| 990 |
+
self.validate()
|
| 991 |
+
if legacy:
|
| 992 |
+
if self._legacy:
|
| 993 |
+
legacy_md = self._legacy
|
| 994 |
+
else:
|
| 995 |
+
legacy_md = self._to_legacy()
|
| 996 |
+
if path:
|
| 997 |
+
legacy_md.write(path, skip_unknown=skip_unknown)
|
| 998 |
+
else:
|
| 999 |
+
legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
|
| 1000 |
+
else:
|
| 1001 |
+
if self._legacy:
|
| 1002 |
+
d = self._from_legacy()
|
| 1003 |
+
else:
|
| 1004 |
+
d = self._data
|
| 1005 |
+
if fileobj:
|
| 1006 |
+
json.dump(d, fileobj, ensure_ascii=True, indent=2, sort_keys=True)
|
| 1007 |
+
else:
|
| 1008 |
+
with codecs.open(path, 'w', 'utf-8') as f:
|
| 1009 |
+
json.dump(d, f, ensure_ascii=True, indent=2, sort_keys=True)
|
| 1010 |
+
|
| 1011 |
+
def add_requirements(self, requirements):
|
| 1012 |
+
if self._legacy:
|
| 1013 |
+
self._legacy.add_requirements(requirements)
|
| 1014 |
+
else:
|
| 1015 |
+
run_requires = self._data.setdefault('run_requires', [])
|
| 1016 |
+
always = None
|
| 1017 |
+
for entry in run_requires:
|
| 1018 |
+
if 'environment' not in entry and 'extra' not in entry:
|
| 1019 |
+
always = entry
|
| 1020 |
+
break
|
| 1021 |
+
if always is None:
|
| 1022 |
+
always = {'requires': requirements}
|
| 1023 |
+
run_requires.insert(0, always)
|
| 1024 |
+
else:
|
| 1025 |
+
rset = set(always['requires']) | set(requirements)
|
| 1026 |
+
always['requires'] = sorted(rset)
|
| 1027 |
+
|
| 1028 |
+
def __repr__(self):
|
| 1029 |
+
name = self.name or '(no name)'
|
| 1030 |
+
version = self.version or 'no version'
|
| 1031 |
+
return '<%s %s %s (%s)>' % (self.__class__.__name__, self.metadata_version, name, version)
|
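
A minimal usage sketch of the Metadata class above (not part of the diff itself). It assumes this vendored tree is importable as pip._vendor.distlib (a standalone distlib install would import from distlib.metadata instead), and 'example-project' is a made-up name for illustration:

from pip._vendor.distlib.metadata import Metadata

md = Metadata(mapping={
    # Must equal Metadata.METADATA_VERSION, or __init__ falls back to
    # LegacyMetadata via MetadataUnrecognizedVersionError.
    'metadata_version': Metadata.METADATA_VERSION,
    'name': 'example-project',        # validated by NAME_MATCHER
    'version': '1.0.0',               # validated by VERSION_MATCHER
    'summary': 'An example project.',  # mandatory outside the 'legacy' scheme
})

md.add_requirements(['requests (>= 2.0)'])
print(md.provides)                   # ['example-project (1.0.0)']
print(md.run_requires)               # [{'requires': ['requests (>= 2.0)']}]
print(md.get_requirements(md.run_requires))  # ['requests (>= 2.0)']
print(sorted(md.todict()))           # only the INDEX_KEYS subset is exported

Attribute access such as md.run_requires is routed through __getattribute__ via mapped_keys, which is why keys absent from the mapping come back as fresh empty lists rather than raising.
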
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/resources.py
ADDED
@@ -0,0 +1,358 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import bisect
import io
import logging
import os
import pkgutil
import sys
import types
import zipimport

from . import DistlibException
from .util import cached_property, get_cache_base, Cache

logger = logging.getLogger(__name__)


cache = None  # created when needed


class ResourceCache(Cache):
    def __init__(self, base=None):
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result


class ResourceBase(object):
    def __init__(self, finder, name):
        self.finder = finder
        self.name = name


class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False  # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        return self.finder.get_size(self)


class ResourceContainer(ResourceBase):
    is_container = True  # Backwards compatibility

    @cached_property
    def resources(self):
        return self.finder.get_resources(self)


class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):  # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        return os.path.exists(path)

    def get_cache_info(self, resource):
        return None, resource.path

    def find(self, resource_name):
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        return set([f for f in os.listdir(resource.path) if allowed(f)])

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            new_name = name
                        else:
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            todo.append(child)
                        else:
                            yield child


class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """
    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        return path

    def _find(self, path):
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        path = resource.path[self.prefix_len:]
        return self._files[path][3]

    def get_resources(self, resource):
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])  # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result


_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
}

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    # See issue #146
    _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    pass


def register_finder(loader, finder_maker):
    _finder_registry[type(loader)] = finder_maker


_finder_cache = {}


def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    if package in _finder_cache:
        result = _finder_cache[package]
    else:
        if package not in sys.modules:
            __import__(package)
        module = sys.modules[package]
        path = getattr(module, '__path__', None)
        if path is None:
            raise DistlibException('You cannot get a finder for a module, '
                                   'only for a package')
        loader = getattr(module, '__loader__', None)
        finder_maker = _finder_registry.get(type(loader))
        if finder_maker is None:
            raise DistlibException('Unable to locate finder for %r' % package)
        result = finder_maker(module)
        _finder_cache[package] = result
    return result


_dummy_module = types.ModuleType(str('__dummy__'))


def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    result = None
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder = _finder_registry.get(type(loader))
    if finder:
        module = _dummy_module
        module.__file__ = os.path.join(path, '')
        module.__loader__ = loader
        result = finder(module)
    return result
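
A short sketch of the finder API defined above (again, not part of the diff). It uses the standard-library email package purely as a convenient, always-importable target; any package works the same way, and a zipimported package would transparently get a ZipResourceFinder instead:

from pip._vendor.distlib.resources import finder

f = finder('email')              # must name a package; a plain module raises DistlibException
res = f.find('parser.py')        # Resource, ResourceContainer, or None if absent
if res is not None and not res.is_container:
    print(res.size)              # delegated to os.path.getsize for file-system resources
    print(res.bytes[:16])        # the whole payload, cached after the first read
    with res.as_stream() as stream:  # a *new* binary stream on every call
        print(stream.readline())

# Walk every resource under the package via the finder's internal queue.
for r in f.iterator(''):
    if not r.is_container:
        pass  # r.name is a '/'-separated path relative to the package
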
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/scripts.py
ADDED
@@ -0,0 +1,447 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2023 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
import time
from zipfile import ZipInfo

from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path, get_executable, get_platform, in_venv)

logger = logging.getLogger(__name__)

_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
 <security>
 <requestedPrivileges>
 <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
 </requestedPrivileges>
 </security>
 </trustInfo>
</assembly>'''.strip()

# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
import re
import sys
from %(module)s import %(import_name)s
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
'''

# Pre-fetch the contents of all executable wrapper stubs.
# This is to address https://github.com/pypa/pip/issues/12666.
# When updating pip, we rename the old pip in place before installing the
# new version. If we try to fetch a wrapper *after* that rename, the finder
# machinery will be confused as the package is no longer available at the
# location where it was imported from. So we load everything into memory in
# advance.

if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):
    # Issue 31: don't hardcode an absolute package name, but
    # determine it relative to the current package
    DISTLIB_PACKAGE = __name__.rsplit('.', 1)[0]

    WRAPPERS = {
        r.name: r.bytes
        for r in finder(DISTLIB_PACKAGE).iterator("")
        if r.name.endswith(".exe")
    }


def enquote_executable(executable):
    if ' ' in executable:
        # make sure we quote only the executable in case of env
        # for example /usr/bin/env "/dir with spaces/bin/jython"
        # instead of "/usr/bin/env /dir with spaces/bin/jython"
        # otherwise whole
        if executable.startswith('/usr/bin/env '):
            env, _executable = executable.split(' ', 1)
            if ' ' in _executable and not _executable.startswith('"'):
                executable = '%s "%s"' % (env, _executable)
        else:
            if not executable.startswith('"'):
                executable = '"%s"' % executable
    return executable


# Keep the old name around (for now), as there is at least one project using it!
_enquote_executable = enquote_executable


class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    script_template = SCRIPT_TEMPLATE

    executable = None  # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None):
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and os._name == 'posix')
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (os.name == 'java' and os._name == 'nt')
        self.version_info = sys.version_info

    def _get_alternate_executable(self, executable, options):
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover

        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _build_shebang(self, executable, post_interp):
        """
        Build a shebang line. In the simple case (on Windows, or a shebang line
        which is not too long or contains spaces) use a simple formulation for
        the shebang. Otherwise, use /bin/sh as the executable, with a contrived
        shebang which allows the script to run either under Python or sh, using
        suitable quoting. Thanks to Harald Nordgren for his input.

        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
                  https://hg.mozilla.org/mozilla-central/file/tip/mach
        """
        if os.name != 'posix':
            simple_shebang = True
        elif getattr(sys, "cross_compiling", False):
            # In a cross-compiling environment, the shebang will likely be a
            # script; this *must* be invoked with the "safe" version of the
            # shebang, or else using os.exec() to run the entry script will
            # fail, raising "OSError 8 [Errno 8] Exec format error".
            simple_shebang = False
        else:
            # Add 3 for '#!' prefix and newline suffix.
            shebang_length = len(executable) + len(post_interp) + 3
            if sys.platform == 'darwin':
                max_shebang_length = 512
            else:
                max_shebang_length = 127
            simple_shebang = ((b' ' not in executable) and (shebang_length <= max_shebang_length))

        if simple_shebang:
            result = b'#!' + executable + post_interp + b'\n'
        else:
            result = b'#!/bin/sh\n'
            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
            result += b"' '''\n"
        return result

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False  # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            if os.name == 'nt':
                # for Python builds from source on Windows, no Python executables with
                # a version suffix are created, so we use python.exe
                executable = os.path.join(sysconfig.get_config_var('BINDIR'),
                                          'python%s' % (sysconfig.get_config_var('EXE')))
            else:
                executable = os.path.join(
                    sysconfig.get_config_var('BINDIR'),
                    'python%s%s' % (sysconfig.get_config_var('VERSION'), sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)

        # Normalise case for Windows - COMMENTED OUT
        # executable = os.path.normcase(executable)
        # N.B. The normalising operation above has been commented out: See
        # issue #124. Although paths in Windows are generally case-insensitive,
        # they aren't always. For example, a path containing a ẞ (which is a
        # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
        # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by
        # Windows as equivalent in path names.

        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp and
                '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = self._build_shebang(executable, post_interp)
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError('The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError('The shebang (%r) is not decodable '
                                 'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        return self.script_template % dict(
            module=entry.prefix, import_name=entry.suffix.split('.')[0], func=entry.suffix)

    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        use_launcher = self.add_launchers and self._is_nt
        if not use_launcher:
            script_bytes = shebang + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
                if source_date_epoch:
                    date_time = time.gmtime(int(source_date_epoch))[:6]
                    zinfo = ZipInfo(filename='__main__.py', date_time=date_time)
                    zf.writestr(zinfo, script_bytes)
                else:
                    zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)  # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass  # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
            filenames.append(outname)

    variant_separator = '-'

    def get_script_filenames(self, name):
        result = set()
        if '' in self.variants:
            result.add(name)
        if 'X' in self.variants:
            result.add('%s%s' % (name, self.version_info[0]))
        if 'X.Y' in self.variants:
            result.add('%s%s%s.%s' % (name, self.variant_separator, self.version_info[0], self.version_info[1]))
        return result

    def _make_script(self, entry, filenames, options=None):
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        scriptnames = self.get_script_filenames(entry.name)
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                logger.warning('%s is an empty file (skipping)', script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script, self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            if struct.calcsize('P') == 8:  # 64-bit
                bits = '64'
            else:
                bits = '32'
            platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
            name = '%s%s%s.exe' % (kind, bits, platform_suffix)
            if name not in WRAPPERS:
                msg = ('Unable to find resource %s in package %s' %
                       (name, DISTLIB_PACKAGE))
                raise ValueError(msg)
            return WRAPPERS[name]

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them,
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
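
A sketch of the ScriptMaker public API above (not part of the diff). It assumes a POSIX host, so the WRAPPERS/.exe launcher path is not exercised; 'demo.py' and the 'demo = demo:main' export entry are made-up names for illustration:

import os
import tempfile

from pip._vendor.distlib.scripts import ScriptMaker

source_dir = tempfile.mkdtemp()
target_dir = tempfile.mkdtemp()
with open(os.path.join(source_dir, 'demo.py'), 'w') as f:
    f.write('#!/usr/bin/env python\nprint("hello")\n')

maker = ScriptMaker(source_dir, target_dir)

# A plain filename is copied, with the shebang rewritten to point at the
# current interpreter (see _copy_script and FIRST_LINE_RE above).
written = maker.make('demo.py')

# An export-entry specification instead generates wrapper scripts from
# SCRIPT_TEMPLATE; with the default variants {'', 'X.Y'} this writes e.g.
# 'demo' and 'demo-3.11' (the suffix depends on the running interpreter).
written += maker.make('demo = demo:main')
print(written)  # absolute paths of everything written
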
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/util.py
ADDED
@@ -0,0 +1,1984 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#
# Copyright (C) 2012-2023 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
from collections import deque
import contextlib
import csv
from glob import iglob as std_iglob
import io
import json
import logging
import os
import py_compile
import re
import socket
try:
    import ssl
except ImportError:  # pragma: no cover
    ssl = None
import subprocess
import sys
import tarfile
import tempfile
import textwrap

try:
    import threading
except ImportError:  # pragma: no cover
    import dummy_threading as threading
import time

from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib,
                     xmlrpclib, HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile,
                     fsdecode, unquote, urlparse)

logger = logging.getLogger(__name__)

#
# Requirement parsing code as per PEP 508
#

IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
OR = re.compile(r'^or\b\s*')
AND = re.compile(r'^and\b\s*')
NON_SPACE = re.compile(r'(\S+)\s*')
STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')

def parse_marker(marker_string):
    """
    Parse a marker string and return a dictionary containing a marker expression.

    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
    the expression grammar, or strings. A string contained in quotes is to be
    interpreted as a literal string, and a string not contained in quotes is a
    variable (such as os_name).
    """

    def marker_var(remaining):
        # either identifier, or literal string
        m = IDENTIFIER.match(remaining)
        if m:
            result = m.groups()[0]
            remaining = remaining[m.end():]
        elif not remaining:
            raise SyntaxError('unexpected end of input')
        else:
            q = remaining[0]
            if q not in '\'"':
                raise SyntaxError('invalid expression: %s' % remaining)
            oq = '\'"'.replace(q, '')
            remaining = remaining[1:]
            parts = [q]
            while remaining:
                # either a string chunk, or oq, or q to terminate
                if remaining[0] == q:
                    break
                elif remaining[0] == oq:
                    parts.append(oq)
                    remaining = remaining[1:]
                else:
                    m = STRING_CHUNK.match(remaining)
                    if not m:
                        raise SyntaxError('error in string literal: %s' % remaining)
                    parts.append(m.groups()[0])
                    remaining = remaining[m.end():]
            else:
                s = ''.join(parts)
                raise SyntaxError('unterminated string: %s' % s)
            parts.append(q)
            result = ''.join(parts)
            remaining = remaining[1:].lstrip()  # skip past closing quote
        return result, remaining

    def marker_expr(remaining):
        if remaining and remaining[0] == '(':
            result, remaining = marker(remaining[1:].lstrip())
            if remaining[0] != ')':
                raise SyntaxError('unterminated parenthesis: %s' % remaining)
            remaining = remaining[1:].lstrip()
        else:
            lhs, remaining = marker_var(remaining)
            while remaining:
                m = MARKER_OP.match(remaining)
                if not m:
                    break
                op = m.groups()[0]
                remaining = remaining[m.end():]
                rhs, remaining = marker_var(remaining)
                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
            result = lhs
        return result, remaining

    def marker_and(remaining):
        lhs, remaining = marker_expr(remaining)
        while remaining:
            m = AND.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_expr(remaining)
            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    def marker(remaining):
        lhs, remaining = marker_and(remaining)
        while remaining:
            m = OR.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_and(remaining)
            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    return marker(marker_string)

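# Editorial note: an illustrative sketch, not part of the vendored module.
# parse_marker returns the parsed expression tree together with any
# unconsumed input:
#
#   tree, rest = parse_marker('python_version >= "3.8" and os_name == "posix"')
#   # tree == {'op': 'and',
#   #          'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.8"'},
#   #          'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}
#   # rest == ''
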
def parse_requirement(req):
    """
    Parse a requirement passed in as a string. Return a Container
    whose attributes contain the various parts of the requirement.
    """
    remaining = req.strip()
    if not remaining or remaining.startswith('#'):
        return None
    m = IDENTIFIER.match(remaining)
    if not m:
        raise SyntaxError('name expected: %s' % remaining)
    distname = m.groups()[0]
    remaining = remaining[m.end():]
    extras = mark_expr = versions = uri = None
    if remaining and remaining[0] == '[':
        i = remaining.find(']', 1)
        if i < 0:
            raise SyntaxError('unterminated extra: %s' % remaining)
        s = remaining[1:i]
        remaining = remaining[i + 1:].lstrip()
        extras = []
        while s:
            m = IDENTIFIER.match(s)
            if not m:
                raise SyntaxError('malformed extra: %s' % s)
            extras.append(m.groups()[0])
            s = s[m.end():]
            if not s:
                break
            if s[0] != ',':
                raise SyntaxError('comma expected in extras: %s' % s)
            s = s[1:].lstrip()
        if not extras:
            extras = None
    if remaining:
        if remaining[0] == '@':
            # it's a URI
            remaining = remaining[1:].lstrip()
            m = NON_SPACE.match(remaining)
            if not m:
                raise SyntaxError('invalid URI: %s' % remaining)
            uri = m.groups()[0]
            t = urlparse(uri)
            # there are issues with Python and URL parsing, so this test
            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
            # always parse invalid URLs correctly - it should raise
            # exceptions for malformed URLs
            if not (t.scheme and t.netloc):
                raise SyntaxError('Invalid URL: %s' % uri)
            remaining = remaining[m.end():].lstrip()
        else:

            def get_versions(ver_remaining):
                """
                Return a list of operator, version tuples if any are
                specified, else None.
                """
                m = COMPARE_OP.match(ver_remaining)
                versions = None
                if m:
                    versions = []
                    while True:
                        op = m.groups()[0]
                        ver_remaining = ver_remaining[m.end():]
                        m = VERSION_IDENTIFIER.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid version: %s' % ver_remaining)
                        v = m.groups()[0]
                        versions.append((op, v))
                        ver_remaining = ver_remaining[m.end():]
                        if not ver_remaining or ver_remaining[0] != ',':
                            break
                        ver_remaining = ver_remaining[1:].lstrip()
                        # Some packages have a trailing comma which would break things
                        # See issue #148
                        if not ver_remaining:
                            break
                        m = COMPARE_OP.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid constraint: %s' % ver_remaining)
                    if not versions:
                        versions = None
                return versions, ver_remaining

            if remaining[0] != '(':
                versions, remaining = get_versions(remaining)
            else:
                i = remaining.find(')', 1)
                if i < 0:
                    raise SyntaxError('unterminated parenthesis: %s' % remaining)
                s = remaining[1:i]
                remaining = remaining[i + 1:].lstrip()
                # As a special diversion from PEP 508, allow a version number
                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
                # is allowed in earlier PEPs)
                if COMPARE_OP.match(s):
                    versions, _ = get_versions(s)
                else:
                    m = VERSION_IDENTIFIER.match(s)
                    if not m:
                        raise SyntaxError('invalid constraint: %s' % s)
                    v = m.groups()[0]
                    s = s[m.end():].lstrip()
                    if s:
                        raise SyntaxError('invalid constraint: %s' % s)
                    versions = [('~=', v)]

    if remaining:
        if remaining[0] != ';':
            raise SyntaxError('invalid requirement: %s' % remaining)
        remaining = remaining[1:].lstrip()

        mark_expr, remaining = parse_marker(remaining)

    if remaining and remaining[0] != '#':
        raise SyntaxError('unexpected trailing data: %s' % remaining)

    if not versions:
        rs = distname
    else:
        rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
    return Container(name=distname, extras=extras, constraints=versions, marker=mark_expr, url=uri, requirement=rs)

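# Editorial note: an illustrative sketch, not part of the vendored module.
# The returned Container exposes the parsed parts as attributes:
#
#   r = parse_requirement('requests[security] (>= 2.8.1, < 3.0)')
#   # r.name == 'requests'
#   # r.extras == ['security']
#   # r.constraints == [('>=', '2.8.1'), ('<', '3.0')]
#   # r.requirement == 'requests >= 2.8.1, < 3.0'
#   # r.marker is None and r.url is None
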
def get_resources_dests(resources_root, rules):
    """Find destinations for resources files"""

    def get_rel_path(root, path):
        # normalizes and returns a lstripped-/-separated path
        root = root.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(root)
        return path[len(root):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        prefix = os.path.join(resources_root, base)
        for abs_base in iglob(prefix):
            abs_glob = os.path.join(abs_base, suffix)
            for abs_path in iglob(abs_glob):
                resource_file = get_rel_path(resources_root, abs_path)
                if dest is None:  # remove the entry if it was here
                    destinations.pop(resource_file, None)
                else:
                    rel_path = get_rel_path(abs_base, abs_path)
                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations

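# Editorial note: an illustrative sketch, not part of the vendored module.
# Each rule is a (base, suffix, dest) triple; a dest of None removes entries
# matched by an earlier rule. Assuming a tree rooted at 'res/' containing
# 'data/cfg.ini' and 'data/notes.txt':
#
#   rules = [('data', '*', 'etc'), ('data', '*.txt', None)]
#   get_resources_dests('res', rules)
#   # == {'data/cfg.ini': 'etc/cfg.ini'}
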
def in_venv():
    if hasattr(sys, 'real_prefix'):
        # virtualenv venvs
        result = True
    else:
        # PEP 405 venvs
        result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
    return result


def get_executable():
    # The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
    # changes to the stub launcher mean that sys.executable always points
    # to the stub on OS X
    # if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
    #         in os.environ):
    #     result = os.environ['__PYVENV_LAUNCHER__']
    # else:
    #     result = sys.executable
    # return result
    # Avoid normcasing: see issue #143
    # result = os.path.normcase(sys.executable)
    result = sys.executable
    if not isinstance(result, text_type):
        result = fsdecode(result)
    return result


def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    p = prompt
    while True:
        s = raw_input(p)
        p = prompt
        if not s and default:
            s = default
        if s:
            c = s[0].lower()
            if c in allowed_chars:
                break
            if error_prompt:
                p = '%c: %s\n%s' % (c, error_prompt, prompt)
    return c


def extract_by_key(d, keys):
    if isinstance(keys, string_types):
        keys = keys.split()
    result = {}
    for key in keys:
        if key in d:
            result[key] = d[key]
    return result

def read_exports(stream):
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        stream.seek(0, 0)

    def read_stream(cp, stream):
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            # entry.dist = self
            entries[name] = entry
    return result


def write_exports(exports, stream):
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for k, v in exports.items():
        # TODO check k, v for valid values
        cp.add_section(k)
        for entry in v.values():
            if entry.suffix is None:
                s = entry.prefix
            else:
                s = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                s = '%s [%s]' % (s, ', '.join(entry.flags))
            cp.set(k, entry.name, s)
    cp.write(stream)

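# Editorial note: an illustrative sketch, not part of the vendored module.
# The legacy exports format is INI-style; each value parses to an ExportEntry.
# Given a file containing:
#
#   [console_scripts]
#   hello = hellopkg.cli:main
#
#   exports = read_exports(open('EXPORTS', 'rb'))
#   entry = exports['console_scripts']['hello']
#   # entry.prefix == 'hellopkg.cli', entry.suffix == 'main'
#
# The file name 'EXPORTS' and the hellopkg names are hypothetical.
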
@contextlib.contextmanager
def tempdir():
    td = tempfile.mkdtemp()
    try:
        yield td
    finally:
        shutil.rmtree(td)


@contextlib.contextmanager
def chdir(d):
    cwd = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        os.chdir(cwd)


@contextlib.contextmanager
def socket_timeout(seconds=15):
    cto = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(cto)

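# Editorial note: an illustrative sketch, not part of the vendored module.
# The context managers compose naturally for scratch-directory work:
#
#   with tempdir() as td:
#       with chdir(td):
#           with open('scratch.txt', 'w') as f:
#               f.write('temporary')
#   # the directory and its contents are gone here, and the cwd is restored
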
class cached_property(object):

    def __init__(self, func):
        self.func = func
        # for attr in ('__name__', '__module__', '__doc__'):
        #     setattr(self, attr, getattr(func, attr, None))

    def __get__(self, obj, cls=None):
        if obj is None:
            return self
        value = self.func(obj)
        object.__setattr__(obj, self.func.__name__, value)
        # obj.__dict__[self.func.__name__] = value = self.func(obj)
        return value

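# Editorial note: an illustrative sketch, not part of the vendored module.
# The first access runs the wrapped function; the result then shadows the
# descriptor on the instance, so later accesses are plain attribute reads:
#
#   class Example(object):
#       @cached_property
#       def answer(self):
#           print('computing')
#           return 42
#
#   e = Example()
#   e.answer  # prints 'computing', returns 42
#   e.answer  # returns 42 without recomputing
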
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    paths = pathname.split('/')
    while os.curdir in paths:
        paths.remove(os.curdir)
    if not paths:
        return os.curdir
    return os.path.join(*paths)

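# Editorial note: an illustrative sketch, not part of the vendored module.
# On POSIX the path is returned unchanged; on Windows the separators are
# rewritten:
#
#   convert_path('docs/api/index.rst')
#   # 'docs/api/index.rst' on POSIX, 'docs\\api\\index.rst' on Windows
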
class FileOperator(object):

    def __init__(self, dry_run=False):
        self.dry_run = dry_run
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" % os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
            if msg:
                raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            if os.path.exists(path):
                os.remove(path)
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        self.write_binary_file(path, data.encode(encoding))

    def set_mode(self, bits, mask, files):
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)

    def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
            compile_kwargs = {}
            if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
                if not isinstance(hashed_invalidation, py_compile.PycInvalidationMode):
                    hashed_invalidation = py_compile.PycInvalidationMode.CHECKED_HASH
                compile_kwargs['invalidation_mode'] = hashed_invalidation
            py_compile.compile(path, dpath, diagpath, True, **compile_kwargs)  # raise error
        self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)  # should fail if non-empty
        self._init_record()

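# Editorial note: an illustrative sketch, not part of the vendored module.
# With dry_run=True the operator only logs; with record=True it tracks what
# it wrote so the work can be committed or rolled back. The /tmp path is
# hypothetical:
#
#   fo = FileOperator(dry_run=False)
#   fo.record = True
#   fo.write_text_file('/tmp/demo/hello.txt', 'hi', 'utf-8')
#   files_written, dirs_created = fo.commit()  # or fo.rollback() to undo
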
def resolve(module_name, dotted_path):
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        mod = __import__(module_name)
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result


class ExportEntry(object):

    def __init__(self, name, prefix, suffix, flags):
        self.name = name
        self.prefix = prefix
        self.suffix = suffix
        self.flags = flags

    @cached_property
    def value(self):
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            result = False
        else:
            result = (self.name == other.name and self.prefix == other.prefix and self.suffix == other.suffix and
                      self.flags == other.flags)
        return result

    __hash__ = object.__hash__


ENTRY_RE = re.compile(
    r'''(?P<name>([^\[]\S*))
        \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
        \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
    ''', re.VERBOSE)


def get_export_entry(specification):
    m = ENTRY_RE.search(specification)
    if not m:
        result = None
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
    else:
        d = m.groupdict()
        name = d['name']
        path = d['callable']
        colons = path.count(':')
        if colons == 0:
            prefix, suffix = path, None
        else:
            if colons != 1:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            prefix, suffix = path.split(':')
        flags = d['flags']
        if flags is None:
            if '[' in specification or ']' in specification:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            flags = []
        else:
            flags = [f.strip() for f in flags.split(',')]
        result = ExportEntry(name, prefix, suffix, flags)
    return result

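# Editorial note: an illustrative sketch, not part of the vendored module.
# Specifications follow the 'name = prefix:suffix [flag1, flag2]' shape:
#
#   e = get_export_entry('hello = hellopkg.cli:main [gui]')
#   # e.name == 'hello', e.prefix == 'hellopkg.cli'
#   # e.suffix == 'main', e.flags == ['gui']
#
# The hellopkg names are hypothetical; e.value would import and resolve
# hellopkg.cli.main on first access.
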
def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        result = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        result = os.path.expanduser('~')
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(result):
        usable = os.access(result, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', result)
    else:
        try:
            os.makedirs(result)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', result, exc_info=True)
            usable = False
    if not usable:
        result = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', result)
    return os.path.join(result, suffix)


def path_to_cache_dir(path, use_abspath=True):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    d, p = os.path.splitdrive(os.path.abspath(path) if use_abspath else path)
    if d:
        d = d.replace(':', '---')
    p = p.replace(os.sep, '--')
    return d + p + '.cache'

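# Editorial note: an illustrative sketch, not part of the vendored module:
#
#   path_to_cache_dir('/home/user/wheels')   # POSIX
#   # '--home--user--wheels.cache'
#   path_to_cache_dir('C:\\Temp\\wheels')    # Windows
#   # 'C-----Temp--wheels.cache'
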
def ensure_slash(s):
    if not s.endswith('/'):
        return s + '/'
    return s


def parse_credentials(netloc):
    username = password = None
    if '@' in netloc:
        prefix, netloc = netloc.rsplit('@', 1)
        if ':' not in prefix:
            username = prefix
        else:
            username, password = prefix.split(':', 1)
    if username:
        username = unquote(username)
    if password:
        password = unquote(password)
    return username, password, netloc

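# Editorial note: an illustrative sketch, not part of the vendored module.
# The host below is hypothetical:
#
#   parse_credentials('user:p%40ss@pypi.example.com')
#   # ('user', 'p@ss', 'pypi.example.com')  -- credentials are unquoted
#   parse_credentials('pypi.example.com')
#   # (None, None, 'pypi.example.com')
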
def get_process_umask():
    result = os.umask(0o22)
    os.umask(result)
    return result


def is_string_sequence(seq):
    result = True
    i = None
    for i, s in enumerate(seq):
        if not isinstance(s, string_types):
            result = False
            break
    assert i is not None
    return result


PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')


def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    result = None
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    m = PYTHON_VERSION.search(filename)
    if m:
        pyver = m.group(1)
        filename = filename[:m.start()]
    if project_name and len(filename) > len(project_name) + 1:
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            n = m.end()
            result = filename[:n], filename[n + 1:], pyver
    if result is None:
        m = PROJECT_NAME_AND_VERSION.match(filename)
        if m:
            result = m.group(1), m.group(3), pyver
    return result

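# Editorial note: an illustrative sketch, not part of the vendored module:
#
#   split_filename('distlib-0.3.8-py3')
#   # ('distlib', '0.3.8', '3')
#   split_filename('no_version_here')
#   # None
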
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')


def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    """
    m = NAME_VERSION_RE.match(p)
    if not m:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    d = m.groupdict()
    return d['name'].strip().lower(), d['ver']


def get_extras(requested, available):
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        requested.remove('*')
        result |= available
    for r in requested:
        if r == '-':
            result.add(r)
        elif r.startswith('-'):
            unwanted = r[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            if unwanted in result:
                result.remove(unwanted)
        else:
            if r not in available:
                logger.warning('undeclared extra: %s' % r)
            result.add(r)
    return result

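# Editorial note: an illustrative sketch, not part of the vendored module.
# '*' requests every declared extra, and a '-' prefix subtracts one:
#
#   sorted(get_extras(['*', '-docs'], ['docs', 'ssl', 'test']))
#   # ['ssl', 'test']
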
#
# Extended metadata functionality
#


def _get_external_data(url):
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        if not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            reader = codecs.getreader('utf-8')(resp)
            # data = reader.read().decode('utf-8')
            # result = json.loads(data)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result


_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'


def get_project_data(name):
    url = '%s/%s/project.json' % (name[0].upper(), name)
    url = urljoin(_external_data_base_url, url)
    result = _get_external_data(url)
    return result


def get_package_data(name, version):
    url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    url = urljoin(_external_data_base_url, url)
    return _get_external_data(url)

class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix, use_abspath=True):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix, use_abspath=use_abspath)

    def clear(self):
        """
        Clear the cache.
        """
        not_removed = []
        for fn in os.listdir(self.base):
            fn = os.path.join(self.base, fn)
            try:
                if os.path.islink(fn) or os.path.isfile(fn):
                    os.remove(fn)
                elif os.path.isdir(fn):
                    shutil.rmtree(fn)
            except Exception:
                not_removed.append(fn)
        return not_removed

class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """

    def __init__(self):
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        subs = self._subscribers
        if event not in subs:
            subs[event] = deque([subscriber])
        else:
            sq = subs[event]
            if append:
                sq.append(subscriber)
            else:
                sq.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        subs = self._subscribers
        if event not in subs:
            raise ValueError('No subscribers: %r' % event)
        subs[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish an event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        result = []
        for subscriber in self.get_subscribers(event):
            try:
                value = subscriber(event, *args, **kwargs)
            except Exception:
                logger.exception('Exception during event publication')
                value = None
            result.append(value)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, args, kwargs, result)
        return result

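# Editorial note: an illustrative sketch, not part of the vendored module.
# Subscribers are called with the event name first; publish collects their
# return values:
#
#   class Notifier(EventMixin):
#       pass
#
#   n = Notifier()
#   n.add('built', lambda event, name: 'built %s' % name)
#   n.publish('built', 'distlib')
#   # ['built distlib']
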
#
# Simple sequencing
#
class Sequencer(object):

    def __init__(self):
        self._preds = {}
        self._succs = {}
        self._nodes = set()  # nodes with no preds/succs

    def add_node(self, node):
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]

    def add(self, pred, succ):
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        return (step in self._preds or step in self._succs or step in self._nodes)

    def get_steps(self, final):
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)

    @property
    def strong_connections(self):
        # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []

        graph = self._succs

        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)

            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node], lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node], index[successor])

            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []

                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node:
                        break
                component = tuple(connected_component)
                # storing the result
                result.append(component)

        for node in graph:
            if node not in lowlinks:
                strongconnect(node)

        return result

    @property
    def dot(self):
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append('  %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append('  %s;' % node)
        result.append('}')
        return '\n'.join(result)

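# Editorial note: an illustrative sketch, not part of the vendored module.
# add(pred, succ) records an ordering edge; get_steps returns the steps
# leading to a final step, dependencies first:
#
#   seq = Sequencer()
#   seq.add('compile', 'link')
#   seq.add('link', 'test')
#   list(seq.get_steps('test'))
#   # ['compile', 'link', 'test']
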
#
# Unarchiving functionality for zip, tar, tgz, tbz, whl
#

ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', '.whl')


def unarchive(archive_filename, dest_dir, format=None, check=True):

    def check_path(path):
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        if not p.startswith(dest_dir) or p[plen] != os.sep:
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    plen = len(dest_dir)
    archive = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
            mode = 'r:gz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
            mode = 'r:bz2'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
            mode = 'r'
        else:  # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            archive = tarfile.open(archive_filename, mode)
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')

        # Limit extraction of dangerous items, if this Python
        # allows it easily. If not, just trust the input.
        # See: https://docs.python.org/3/library/tarfile.html#extraction-filters
        def extraction_filter(member, path):
            """Run tarfile.tar_filter, but raise the expected ValueError"""
            # This is only called if the current Python has tarfile filters
            try:
                return tarfile.tar_filter(member, path)
            except tarfile.FilterError as exc:
                raise ValueError(str(exc))

        archive.extraction_filter = extraction_filter

        archive.extractall(dest_dir)

    finally:
        if archive:
            archive.close()

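# Editorial note: an illustrative sketch, not part of the vendored module.
# The format is inferred from the extension, and member paths are vetted
# against escaping the destination before extraction. The paths below are
# hypothetical:
#
#   unarchive('dist/pkg-1.0.tar.gz', '/tmp/unpacked')
#   # raises ValueError if any member would land outside /tmp/unpacked
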
def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    result = io.BytesIO()
    dlen = len(directory)
    with ZipFile(result, "w") as zf:
        for root, dirs, files in os.walk(directory):
            for name in files:
                full = os.path.join(root, name)
                rel = root[dlen:]
                dest = os.path.join(rel, name)
                zf.write(full, dest)
    return result

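# Editorial note: an illustrative sketch, not part of the vendored module.
# The returned BytesIO holds a complete zip archive of the tree; the paths
# below are hypothetical:
#
#   buf = zip_dir('/tmp/unpacked')
#   with open('tree.zip', 'wb') as f:
#       f.write(buf.getvalue())
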
#
# Simple progress bar
#

UNITS = ('', 'K', 'M', 'G', 'T', 'P')


class Progress(object):
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None
        self.elapsed = 0
        self.done = False

    def update(self, curval):
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        self.update(self.min)
        return self

    def stop(self):
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        if self.done:
            result = '100 %'
        elif self.max is None:
            result = ' ?? %'
        else:
            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
            result = '%3d %%' % v
        return result

    def format_duration(self, duration):
        if (duration <= 0) and self.max is None or self.cur == self.min:
            result = '??:??:??'
        # elif duration < 1:
        #     result = '--:--:--'
        else:
            result = time.strftime('%H:%M:%S', time.gmtime(duration))
        return result

    @property
    def ETA(self):
        if self.done:
            prefix = 'Done'
            t = self.elapsed
            # import pdb; pdb.set_trace()
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                # import pdb; pdb.set_trace()
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        if self.elapsed == 0:
            result = 0.0
        else:
            result = (self.cur - self.min) / self.elapsed
        for unit in UNITS:
            if result < 1000:
                break
            result /= 1000.0
        return '%d %sB/s' % (result, unit)

| 1403 |
+
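# --- Editor's usage sketch (illustrative; not part of the vendored file).
# Progress only does the bookkeeping; rendering is left to the caller.
from distlib.util import Progress

progress = Progress(maxval=1000).start()
for _ in range(10):
    progress.increment(100)          # e.g. one 100-byte chunk handled
    print(progress.percentage, progress.ETA, progress.speed)
progress.stop()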
#
# Glob functionality
#

RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')


def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        msg = """invalid glob %r: recursive glob "**" must be used alone"""
        raise ValueError(msg % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
        raise ValueError(msg % path_glob)
    return _iglob(path_glob)


def _iglob(path_glob):
    rich_path_glob = RICH_GLOB.split(path_glob, 1)
    if len(rich_path_glob) > 1:
        assert len(rich_path_glob) == 3, rich_path_glob
        prefix, set, suffix = rich_path_glob
        for item in set.split(','):
            for path in _iglob(''.join((prefix, item, suffix))):
                yield path
    else:
        if '**' not in path_glob:
            for item in std_iglob(path_glob):
                yield item
        else:
            prefix, radical = path_glob.split('**', 1)
            if prefix == '':
                prefix = '.'
            if radical == '':
                radical = '*'
            else:
                # we support both
                radical = radical.lstrip('/')
                radical = radical.lstrip('\\')
            for path, dir, files in os.walk(prefix):
                path = os.path.normpath(path)
                for fn in _iglob(os.path.join(path, radical)):
                    yield fn
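# --- Editor's usage sketch (illustrative; not part of the vendored file).
# '**' recurses into subdirectories and '{a,b}' expands alternatives;
# '**' fused to other characters is rejected up front.
from distlib.util import iglob

for path in iglob('src/**/*.{py,txt}'):
    print(path)
# iglob('src/**py') raises ValueError: '**' must be used alone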
if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError)

    #
    # HTTPSConnection which verifies certificates/matches domains
    #

    class HTTPSConnection(httplib.HTTPSConnection):
        ca_certs = None  # set this to the path to the certs file (.pem)
        check_domain = True  # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            if hasattr(ssl, 'OP_NO_SSLv2'):
                context.options |= ssl.OP_NO_SSLv2
            if getattr(self, 'cert_file', None):
                context.load_cert_chain(self.cert_file, self.key_file)
            kwargs = {}
            if self.ca_certs:
                context.verify_mode = ssl.CERT_REQUIRED
                context.load_verify_locations(cafile=self.ca_certs)
                if getattr(ssl, 'HAS_SNI', False):
                    kwargs['server_hostname'] = self.host

            self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError:  # pragma: no cover
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise

    class HTTPSHandler(BaseHTTPSHandler):

        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError('Unable to verify server certificate '
                                           'for %s' % req.host)
                else:
                    raise

    #
    # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
    # HTML containing a http://xyz link when it should be https://xyz),
    # you can use the following handler class, which does not allow HTTP traffic.
    #
    # It works by inheriting from HTTPHandler - so build_opener won't add a
    # handler for HTTP itself.
    #
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):

        def http_open(self, req):
            raise URLError('Unexpected HTTP request on what should be a secure '
                           'connection: %s' % req)
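# --- Editor's usage sketch (illustrative; not part of the vendored file).
# Only available when the ssl module is importable. Because the handler
# inherits from HTTPHandler, build_opener does not add a plain-HTTP handler.
from urllib.request import build_opener
from distlib.util import HTTPSOnlyHandler

opener = build_opener(HTTPSOnlyHandler(ca_certs=None))
# opener.open('https://pypi.org/')   # allowed (requires network access)
# opener.open('http://pypi.org/')    # raises URLError: unexpected HTTP request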
#
# XML-RPC with timeouts
#
class Transport(xmlrpclib.Transport):

    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        h, eh, x509 = self.get_host_info(host)
        if not self._connection or host != self._connection[0]:
            self._extra_headers = eh
            self._connection = host, httplib.HTTPConnection(h)
        return self._connection[1]


if ssl:

    class SafeTransport(xmlrpclib.SafeTransport):

        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            h, eh, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            kwargs['timeout'] = self.timeout
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPSConnection(h, None, **kwargs)
            return self._connection[1]


class ServerProxy(xmlrpclib.ServerProxy):

    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            # scheme = splittype(uri)  # deprecated as of Python 3.8
            scheme = urlparse(uri)[0]
            use_datetime = kwargs.get('use_datetime', 0)
            if scheme == 'https':
                tcls = SafeTransport
            else:
                tcls = Transport
            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
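# --- Editor's usage sketch (illustrative; not part of the vendored file).
# The timeout kwarg is what routes connections through the Transport
# subclasses above; without it, stock xmlrpclib behaviour applies.
from distlib.util import ServerProxy

proxy = ServerProxy('https://example.com/RPC2', timeout=5.0)
# proxy.some_method()   # hypothetical method; each call honours the timeout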
#
# CSV functionality. This is provided because on 2.x, the csv module can't
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
#


def _csv_open(fn, mode, **kwargs):
    if sys.version_info[0] < 3:
        mode += 'b'
    else:
        kwargs['newline'] = ''
        # Python 3 determines encoding from locale. Force 'utf-8'
        # file encoding to match other forced utf-8 encoding
        kwargs['encoding'] = 'utf-8'
    return open(fn, mode, **kwargs)


class CSVBase(object):
    defaults = {
        'delimiter': str(','),  # The strs are used because we need native
        'quotechar': str('"'),  # str in the csv API (2.x won't take
        'lineterminator': str('\n')  # Unicode)
    }

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()


class CSVReader(CSVBase):

    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # needs to be a text stream
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        result = next(self.reader)
        if sys.version_info[0] < 3:
            for i, item in enumerate(result):
                if not isinstance(item, text_type):
                    result[i] = item.decode('utf-8')
        return result

    __next__ = next


class CSVWriter(CSVBase):

    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            r = []
            for item in row:
                if isinstance(item, text_type):
                    item = item.encode('utf-8')
                r.append(item)
            row = r
        self.writer.writerow(row)
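# --- Editor's usage sketch (illustrative; not part of the vendored file).
# A RECORD-style round trip; both classes force UTF-8 on Python 3.
from distlib.util import CSVReader, CSVWriter

with CSVWriter('RECORD') as writer:
    writer.writerow(['pkg/__init__.py', 'sha256=abc123', '42'])

with CSVReader(path='RECORD') as reader:
    for row in reader:
        print(row)    # ['pkg/__init__.py', 'sha256=abc123', '42']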
#
# Configurator functionality
#


class Configurator(BaseConfigurator):

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        super(Configurator, self).__init__(config)
        self.base = base or os.getcwd()

    def configure_custom(self, config):

        def convert(o):
            if isinstance(o, (list, tuple)):
                result = type(o)([convert(i) for i in o])
            elif isinstance(o, dict):
                if '()' in o:
                    result = self.configure_custom(o)
                else:
                    result = {}
                    for k in o:
                        result[k] = convert(o[k])
            else:
                result = self.convert(o)
            return result

        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(o) for o in args])
        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
        kwargs = dict(items)
        result = c(*args, **kwargs)
        if props:
            for n, v in props.items():
                setattr(result, n, convert(v))
        return result

    def __getitem__(self, key):
        result = self.config[key]
        if isinstance(result, dict) and '()' in result:
            self.config[key] = result = self.configure_custom(result)
        return result

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        if not os.path.isabs(value):
            value = os.path.join(self.base, value)
        with codecs.open(value, 'r', encoding='utf-8') as f:
            result = json.load(f)
        return result
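# --- Editor's usage sketch (illustrative; not part of the vendored file).
# '()' names a callable to instantiate, '[]' supplies positional args, and
# the remaining keys become keyword args.
from distlib.util import Configurator

cfg = Configurator({
    'stamp': {
        '()': 'datetime.datetime',
        '[]': [2024, 1, 1],
    }
})
print(cfg['stamp'])   # 2024-01-01 00:00:00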
class SubprocessMixin(object):
    """
    Mixin for running subprocesses and capturing their output
    """

    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            s = stream.readline()
            if not s:
                break
            if progress is not None:
                progress(s, context)
            else:
                if not verbose:
                    sys.stderr.write('.')
                else:
                    sys.stderr.write(s.decode('utf-8'))
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
        t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
        t1.start()
        t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
        t2.start()
        p.wait()
        t1.join()
        t2.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return p
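# --- Editor's usage sketch (illustrative; not part of the vendored file).
# Mix into any class; stdout/stderr are drained on background threads so
# the child process cannot block on full pipes.
import sys
from distlib.util import SubprocessMixin

class Runner(SubprocessMixin):
    pass

p = Runner(verbose=True).run_command([sys.executable, '-c', 'print("hello")'])
print(p.returncode)   # 0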
def normalize_name(name):
    """Normalize a python package name a la PEP 503"""
    # https://www.python.org/dev/peps/pep-0503/#normalized-names
    return re.sub('[-_.]+', '-', name).lower()
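# --- Editor's examples (illustrative; not part of the vendored file).
# Runs of '-', '_' and '.' collapse to a single '-', then lowercase.
from distlib.util import normalize_name

print(normalize_name('Django_REST-framework'))   # django-rest-framework
print(normalize_name('zope.interface'))          # zope-interface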
# def _get_pypirc_command():
#     """
#     Get the distutils command for interacting with PyPI configurations.
#     :return: the command.
#     """
#     from distutils.core import Distribution
#     from distutils.config import PyPIRCCommand
#     d = Distribution()
#     return PyPIRCCommand(d)


class PyPIRCFile(object):

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'

    def __init__(self, fn=None, url=None):
        if fn is None:
            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
        self.filename = fn
        self.url = url

    def read(self):
        result = {}

        if os.path.exists(self.filename):
            repository = self.url or self.DEFAULT_REPOSITORY

            config = configparser.RawConfigParser()
            config.read(self.filename)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in index_servers.split('\n') if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                else:
                    for server in _servers:
                        result = {'server': server}
                        result['username'] = config.get(server, 'username')

                        # optional params
                        for key, default in (('repository', self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM),
                                             ('password', None)):
                            if config.has_option(server, key):
                                result[key] = config.get(server, key)
                            else:
                                result[key] = default

                        # work around people having "repository" for the "pypi"
                        # section of their config set to the HTTP (rather than
                        # HTTPS) URL
                        if (server == 'pypi' and repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                            result['repository'] = self.DEFAULT_REPOSITORY
                        elif (result['server'] != repository and result['repository'] != repository):
                            result = {}
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                result = {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM
                }
        return result

    def update(self, username, password):
        # import pdb; pdb.set_trace()
        config = configparser.RawConfigParser()
        fn = self.filename
        config.read(fn)
        if not config.has_section('pypi'):
            config.add_section('pypi')
        config.set('pypi', 'username', username)
        config.set('pypi', 'password', password)
        with open(fn, 'w') as f:
            config.write(f)


def _load_pypirc(index):
    """
    Read the PyPI access configuration as supported by distutils.
    """
    return PyPIRCFile(url=index.url).read()


def _store_pypirc(index):
    PyPIRCFile().update(index.username, index.password)
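# --- Editor's usage sketch (illustrative; not part of the vendored file).
# read() returns {} when no ~/.pypirc exists; update() would rewrite that
# file, so it is left commented out here.
from distlib.util import PyPIRCFile

cfg = PyPIRCFile().read()
print(cfg.get('repository'))
# PyPIRCFile().update('user', 'secret')   # writes a [pypi] section to ~/.pypirc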
#
# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
# tweaks
#


def get_host_platform():
    """Return a string that identifies the current platform. This is used mainly to
    distinguish platform-specific build directories and platform-specific built
    distributions. Typically includes the OS name and version and the
    architecture (as supplied by 'os.uname()'), although the exact information
    included depends on the OS; eg. on Linux, the kernel version isn't
    particularly important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.

    """
    if os.name == 'nt':
        if 'amd64' in sys.version.lower():
            return 'win-amd64'
        if '(arm)' in sys.version.lower():
            return 'win-arm32'
        if '(arm64)' in sys.version.lower():
            return 'win-arm64'
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    if os.name != 'posix' or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters, and translate
    # spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_').replace('/', '-')

    if osname[:5] == 'linux':
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)

    elif osname[:5] == 'sunos':
        if release[0] >= '5':  # SunOS 5 == Solaris 2
            osname = 'solaris'
            release = '%d.%s' % (int(release[0]) - 3, release[2:])
            # We can't use 'platform.architecture()[0]' because a
            # bootstrap problem. We use a dict to get an error
            # if some suspicious happens.
            bitness = {2147483647: '32bit', 9223372036854775807: '64bit'}
            machine += '.%s' % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:3] == 'aix':
        from _aix_support import aix_platform
        return aix_platform()
    elif osname[:6] == 'cygwin':
        osname = 'cygwin'
        rel_re = re.compile(r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == 'darwin':
        import _osx_support
        try:
            from distutils import sysconfig
        except ImportError:
            import sysconfig
        osname, release, machine = _osx_support.get_platform_osx(sysconfig.get_config_vars(), osname, release, machine)

    return '%s-%s-%s' % (osname, release, machine)


_TARGET_TO_PLAT = {
    'x86': 'win32',
    'x64': 'win-amd64',
    'arm': 'win-arm32',
}


def get_platform():
    if os.name != 'nt':
        return get_host_platform()
    cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH')
    if cross_compilation_target not in _TARGET_TO_PLAT:
        return get_host_platform()
    return _TARGET_TO_PLAT[cross_compilation_target]
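# --- Editor's usage sketch (illustrative; not part of the vendored file).
# get_platform() honours VSCMD_ARG_TGT_ARCH for Windows cross-builds and
# falls back to get_host_platform() everywhere else.
from distlib.util import get_platform

print(get_platform())   # e.g. 'linux-x86_64' or 'win-amd64'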
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/version.py
ADDED
@@ -0,0 +1,750 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2023 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
"""

import logging
import re

from .compat import string_types
from .util import parse_requirement

__all__ = ['NormalizedVersion', 'NormalizedMatcher',
           'LegacyVersion', 'LegacyMatcher',
           'SemanticVersion', 'SemanticMatcher',
           'UnsupportedVersionError', 'get_scheme']

logger = logging.getLogger(__name__)


class UnsupportedVersionError(ValueError):
    """This is an unsupported version."""
    pass


class Version(object):

    def __init__(self, s):
        self._string = s = s.strip()
        self._parts = parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0

    def parse(self, s):
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        raise NotImplementedError('Please implement in subclasses.')


class Matcher(object):
    version_class = None

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    # this is a method only to support alternative implementations
    # via overriding
    def parse_requirement(self, s):
        return parse_requirement(s)

    def __init__(self, s):
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        r = self.parse_requirement(s)
        if not r:
            raise ValueError('Not valid: %r' % s)
        self.name = r.name
        self.key = self.name.lower()  # for case-insensitive comparisons
        clist = []
        if r.constraints:
            # import pdb; pdb.set_trace()
            for op, s in r.constraints:
                if s.endswith('.*'):
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    # Just to check that vn is a valid version
                    self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            if isinstance(f, string_types):
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string


PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?'
                               r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?'
                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I)


def _pep_440_key(s):
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        epoch = int(groups[0][:-1])
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        if pre[1] is None:
            pre = pre[0], 0
        else:
            pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        if post[1] is None:
            post = post[0], 0
        else:
            post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        if dev[1] is None:
            dev = dev[0], 0
        else:
            dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)  # to sort before a0
        else:
            pre = ('z',)  # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)  # sort before 'a'
    if not dev:
        dev = ('final',)

    return epoch, nums, pre, post, dev, local


_normalized_key = _pep_440_key


class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """

    def parse(self, s):
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)  # must succeed
        groups = m.groups()
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)


def _match_prefix(x, y):
    x = str(x)
    y = str(y)
    if x == y:
        return True
    if not x.startswith(y):
        return False
    n = len(y)
    return x[n] == '.'
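# --- Editor's usage sketch (illustrative; not part of the vendored file).
# PEP 440 ordering: pre-releases sort before the corresponding final release.
from distlib.version import NormalizedVersion

v = NormalizedVersion('1.1a1')
print(NormalizedVersion('1.0') < v)    # True
print(v < NormalizedVersion('1.1'))    # True: 1.1a1 precedes 1.1
print(v.is_prerelease)                 # True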
class NormalizedMatcher(Matcher):
    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        if prefix:
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
        # if not prefix:
        #     return True
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)
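# --- Editor's usage sketch (illustrative; not part of the vendored file).
# A matcher parses the "name (constraints)" form and requires every
# constraint to hold.
from distlib.version import NormalizedMatcher

m = NormalizedMatcher('requests (>= 2.0, < 3.0)')
print(m.match('2.31.0'))   # True
print(m.match('3.0.0'))    # False
print(m.match('1.9'))      # False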
_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),  # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),  # .N -> 0.N at start
    (re.compile('^[.-]'), ''),  # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),  # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),  # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),  # remove leading v(ersion)
    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),  # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
     'pre.alpha'),  # standardise
    (re.compile(r'\(beta\)$'), 'beta'),  # remove parentheses
)

_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),  # remove leading puncts
    (re.compile('[,*")([\\]]'), ''),  # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),  # replace illegal chars
    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
    (re.compile(r'\.$'), ''),  # trailing '.'
)

_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')


def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    """
    result = s.strip().lower()
    for pat, repl in _REPLACEMENTS:
        result = pat.sub(repl, result)
    if not result:
        result = '0.0.0'

    # Now look for numeric prefix, and separate it out from
    # the rest.
    # import pdb; pdb.set_trace()
    m = _NUMERIC_PREFIX.match(result)
    if not m:
        prefix = '0.0.0'
        suffix = result
    else:
        prefix = m.groups()[0].split('.')
        prefix = [int(i) for i in prefix]
        while len(prefix) < 3:
            prefix.append(0)
        if len(prefix) == 3:
            suffix = result[m.end():]
        else:
            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
            prefix = prefix[:3]
        prefix = '.'.join([str(i) for i in prefix])
        suffix = suffix.strip()
    if suffix:
        # import pdb; pdb.set_trace()
        # massage the suffix.
        for pat, repl in _SUFFIX_REPLACEMENTS:
            suffix = pat.sub(repl, suffix)

    if not suffix:
        result = prefix
    else:
        sep = '-' if 'dev' in suffix else '+'
        result = prefix + sep + suffix
    if not is_semver(result):
        result = None
    return result


def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    try:
        _normalized_key(s)
        return s  # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    # TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    # 0.4a1.r10 -> 0.4a1.post10
    # 0.9.33-17222 -> 0.9.33.post17222
    # 0.9.33-r17222 -> 0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    # 0.9.33+r17222 -> 0.9.33.dev17222
    # 1.0dev123 -> 1.0.dev123
    # 1.0.git123 -> 1.0.dev123
    # 1.0.bzr123 -> 1.0.dev123
    # 0.1a0dev.123 -> 0.1a0.dev123
    # PyPI stats: ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    # 0.2.pre1 -> 0.2c1
    # 0.2-c1 -> 0.2c1
    # 1.0preview123 -> 1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs
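# --- Editor's examples (illustrative; not part of the vendored file).
# Common PyPI spellings coerced into PEP 440 form, or None on failure.
from distlib.version import _suggest_normalized_version

print(_suggest_normalized_version('1.0-alpha-2'))    # 1.0a2
print(_suggest_normalized_version('2.1-rc2'))        # 2.1c2
print(_suggest_normalized_version('not a version'))  # None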
#
# Legacy version processing (distribute-compatible)
#

_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
    'pre': 'c',
    'preview': 'c',
    '-': 'final-',
    'rc': 'c',
    'dev': '@',
    '': None,
    '.': None,
}


def _legacy_key(s):

    def get_parts(s):
        result = []
        for p in _VERSION_PART.split(s.lower()):
            p = _VERSION_REPLACE.get(p, p)
            if p:
                if '0' <= p[:1] <= '9':
                    p = p.zfill(8)
                else:
                    p = '*' + p
                result.append(p)
        result.append('*final')
        return result

    result = []
    for p in get_parts(s):
        if p.startswith('*'):
            if p < '*final':
                while result and result[-1] == '*final-':
                    result.pop()
            while result and result[-1] == '00000000':
                result.pop()
        result.append(p)
    return tuple(result)


class LegacyVersion(Version):

    def parse(self, s):
        return _legacy_key(s)

    @property
    def is_prerelease(self):
        result = False
        for x in self._parts:
            if (isinstance(x, string_types) and x.startswith('*') and x < '*final'):
                result = True
                break
        return result


class LegacyMatcher(Matcher):
    version_class = LegacyVersion

    _operators = dict(Matcher._operators)
    _operators['~='] = '_match_compatible'

    numeric_re = re.compile(r'^(\d+(\.\d+)*)')

    def _match_compatible(self, version, constraint, prefix):
        if version < constraint:
            return False
        m = self.numeric_re.match(str(constraint))
        if not m:
            logger.warning('Cannot compute compatible match for version %s '
                           ' and constraint %s', version, constraint)
            return True
        s = m.groups()[0]
        if '.' in s:
            s = s.rsplit('.', 1)[0]
        return _match_prefix(version, s)


#
# Semantic versioning
#

_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)


def is_semver(s):
    return _SEMVER_RE.match(s)


def _semantic_key(s):

    def make_tuple(s, absent):
        if s is None:
            result = (absent,)
        else:
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values so simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build


class SemanticVersion(Version):

    def parse(self, s):
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        return self._parts[1][0] != '|'


class SemanticMatcher(Matcher):
    version_class = SemanticVersion


class VersionScheme(object):

    def __init__(self, key, matcher, suggester=None):
        self.key = key
        self.matcher = matcher
        self.suggester = suggester

    def is_valid_version(self, s):
        try:
            self.matcher.version_class(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_matcher(self, s):
        try:
            self.matcher(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        # See issue #140. Be tolerant of a single trailing comma.
        if s.endswith(','):
            s = s[:-1]
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        if self.suggester is None:
            result = None
        else:
            result = self.suggester(s)
        return result


_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

_SCHEMES['default'] = _SCHEMES['normalized']


def get_scheme(name):
    if name not in _SCHEMES:
        raise ValueError('unknown scheme name: %r' % name)
    return _SCHEMES[name]
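# --- Editor's usage sketch (illustrative; not part of the vendored file).
# A scheme bundles a sort key, a matcher class and an optional suggester.
from distlib.version import get_scheme

scheme = get_scheme('default')                    # the PEP 440 scheme
print(scheme.is_valid_version('1.0.post1'))       # True
print(scheme.is_valid_matcher('foo (>= 1.0)'))    # True
print(scheme.suggest('1.0-alpha-2'))              # 1.0a2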
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/w32.exe
ADDED
Binary file (91.6 kB)
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/distlib/wheel.py
ADDED
|
@@ -0,0 +1,1100 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2023 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import base64
import codecs
import datetime
from email import message_from_file
import hashlib
import json
import logging
import os
import posixpath
import re
import shutil
import sys
import tempfile
import zipfile

from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base,
                   read_exports, tempdir, get_platform)
from .version import NormalizedVersion, UnsupportedVersionError

logger = logging.getLogger(__name__)

cache = None  # created when needed

if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):  # pragma: no cover
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':  # pragma: no cover
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:  # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX

ARCH = get_platform().replace('-', '_').replace('.', '_')

ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp').split('-')[0]
else:

    def _derive_abi():
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if IMP_PREFIX == 'cp':
            vi = sys.version_info[:2]
            if vi < (3, 8):
                wpm = sysconfig.get_config_var('WITH_PYMALLOC')
                if wpm is None:
                    wpm = True
                if wpm:
                    parts.append('m')
                if vi < (3, 3):
                    us = sysconfig.get_config_var('Py_UNICODE_SIZE')
                    if us == 4 or (us is None and sys.maxunicode == 0x10FFFF):
                        parts.append('u')
        return ''.join(parts)

    ABI = _derive_abi()
    del _derive_abi

FILENAME_RE = re.compile(
    r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)

NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)

SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'

if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')

if sys.version_info[0] < 3:
    import imp
else:
    imp = None
    import importlib.machinery
    import importlib.util


def _get_suffixes():
    if imp:
        return [s[0] for s in imp.get_suffixes()]
    else:
        return importlib.machinery.EXTENSION_SUFFIXES


def _load_dynamic(name, path):
    # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
    if imp:
        return imp.load_dynamic(name, path)
    else:
        spec = importlib.util.spec_from_file_location(name, path)
        module = importlib.util.module_from_spec(spec)
        sys.modules[name] = module
        spec.loader.exec_module(module)
        return module


class Mounter(object):

    def __init__(self):
        self.impure_wheels = {}
        self.libs = {}

    def add(self, pathname, extensions):
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        extensions = self.impure_wheels.pop(pathname)
        for k, v in extensions:
            if k in self.libs:
                del self.libs[k]

    def find_module(self, fullname, path=None):
        if fullname in self.libs:
            result = self
        else:
            result = None
        return result

    def load_module(self, fullname):
        if fullname in sys.modules:
            result = sys.modules[fullname]
        else:
            if fullname not in self.libs:
                raise ImportError('unable to find extension for %s' % fullname)
            result = _load_dynamic(fullname, self.libs[fullname])
            result.__loader__ = self
            parts = fullname.rsplit('.', 1)
            if len(parts) > 1:
                result.__package__ = parts[0]
        return result


_hook = Mounter()


class Wheel(object):
    """
    Class to build and install from Wheel files (PEP 427).
    """

    wheel_version = (1, 1)
    hash_kind = 'sha256'

    def __init__(self, filename=None, sign=False, verify=False):
        """
        Initialise an instance using a (valid) filename.
        """
        self.sign = sign
        self.should_verify = verify
        self.buildver = ''
        self.pyver = [PYVER]
        self.abi = ['none']
        self.arch = ['any']
        self.dirname = os.getcwd()
        if filename is None:
            self.name = 'dummy'
            self.version = '0.1'
            self._filename = self.filename
        else:
            m = NAME_VERSION_RE.match(filename)
            if m:
                info = m.groupdict('')
                self.name = info['nm']
                # Reinstate the local version separator
                self.version = info['vn'].replace('_', '-')
                self.buildver = info['bn']
                self._filename = self.filename
            else:
                dirname, filename = os.path.split(filename)
                m = FILENAME_RE.match(filename)
                if not m:
                    raise DistlibException('Invalid name or '
                                           'filename: %r' % filename)
                if dirname:
                    self.dirname = os.path.abspath(dirname)
                self._filename = filename
                info = m.groupdict('')
                self.name = info['nm']
                self.version = info['vn']
                self.buildver = info['bn']
                self.pyver = info['py'].split('.')
                self.abi = info['bi'].split('.')
                self.arch = info['ar'].split('.')

    @property
    def filename(self):
        """
        Build and return a filename from the various components.
        """
        if self.buildver:
            buildver = '-' + self.buildver
        else:
            buildver = ''
        pyver = '.'.join(self.pyver)
        abi = '.'.join(self.abi)
        arch = '.'.join(self.arch)
        # replace - with _ as a local version separator
        version = self.version.replace('-', '_')
        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, abi, arch)

    @property
    def exists(self):
        path = os.path.join(self.dirname, self.filename)
        return os.path.isfile(path)

    @property
    def tags(self):
        for pyver in self.pyver:
            for abi in self.abi:
                for arch in self.arch:
                    yield pyver, abi, arch

    @cached_property
    def metadata(self):
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            self.get_wheel_metadata(zf)
            # wv = wheel_metadata['Wheel-Version'].split('.', 1)
            # file_version = tuple([int(i) for i in wv])
            # if file_version < (1, 1):
            #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
            #            LEGACY_METADATA_FILENAME]
            # else:
            #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
            fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
            result = None
            for fn in fns:
                try:
                    metadata_filename = posixpath.join(info_dir, fn)
                    with zf.open(metadata_filename) as bf:
                        wf = wrapper(bf)
                        result = Metadata(fileobj=wf)
                        if result:
                            break
                except KeyError:
                    pass
            if not result:
                raise ValueError('Invalid wheel, because metadata is '
                                 'missing: looked in %s' % ', '.join(fns))
        return result

    def get_wheel_metadata(self, zf):
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        metadata_filename = posixpath.join(info_dir, 'WHEEL')
        with zf.open(metadata_filename) as bf:
            wf = codecs.getreader('utf-8')(bf)
            message = message_from_file(wf)
        return dict(message)

    @cached_property
    def info(self):
        pathname = os.path.join(self.dirname, self.filename)
        with ZipFile(pathname, 'r') as zf:
            result = self.get_wheel_metadata(zf)
        return result

    def process_shebang(self, data):
        m = SHEBANG_RE.match(data)
        if m:
            end = m.end()
            shebang, data_after_shebang = data[:end], data[end:]
            # Preserve any arguments after the interpreter
            if b'pythonw' in shebang.lower():
                shebang_python = SHEBANG_PYTHONW
            else:
                shebang_python = SHEBANG_PYTHON
            m = SHEBANG_DETAIL_RE.match(shebang)
            if m:
                args = b' ' + m.groups()[-1]
            else:
                args = b''
            shebang = shebang_python + args
            data = shebang + data_after_shebang
        else:
            cr = data.find(b'\r')
            lf = data.find(b'\n')
            if cr < 0 or cr > lf:
                term = b'\n'
            else:
                if data[cr:cr + 2] == b'\r\n':
                    term = b'\r\n'
                else:
                    term = b'\r'
            data = SHEBANG_PYTHON + term + data
        return data

    def get_hash(self, data, hash_kind=None):
        if hash_kind is None:
            hash_kind = self.hash_kind
        try:
            hasher = getattr(hashlib, hash_kind)
        except AttributeError:
            raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
        result = hasher(data).digest()
        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
        return hash_kind, result

    def write_record(self, records, record_path, archive_record_path):
        records = list(records)  # make a copy, as mutated
        records.append((archive_record_path, '', ''))
        with CSVWriter(record_path) as writer:
            for row in records:
                writer.writerow(row)

    def write_records(self, info, libdir, archive_paths):
        records = []
        distinfo, info_dir = info
        # hasher = getattr(hashlib, self.hash_kind)
        for ap, p in archive_paths:
            with open(p, 'rb') as f:
                data = f.read()
            digest = '%s=%s' % self.get_hash(data)
            size = os.path.getsize(p)
            records.append((ap, digest, size))

        p = os.path.join(distinfo, 'RECORD')
        ap = to_posix(os.path.join(info_dir, 'RECORD'))
        self.write_record(records, p, ap)
        archive_paths.append((ap, p))

    def build_zip(self, pathname, archive_paths):
        with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
            for ap, p in archive_paths:
                logger.debug('Wrote %s to %s in wheel', p, ap)
                zf.write(p, ap)

    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.
        """
        if tags is None:
            tags = {}

        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']

        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)

        libdir = paths[libkey]

        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        archive_paths = []

        # First, stuff which is not in site-packages
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        if key == 'scripts' and not p.endswith('.exe'):
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)

        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'

            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))

        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        files = os.listdir(distinfo)
        for fn in files:
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))

        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))

        # sort the entries by archive path. Not needed by any spec, but it
        # keeps the archive listing and RECORD tidier than they would otherwise
        # be. Use the number of path segments to keep directory entries together,
        # and keep the dist-info stuff at the end.
        def sorter(t):
            ap = t[0]
            n = ap.count('/')
            if '.dist-info' in ap:
                n += 10000
            return (n, ap)

        archive_paths = sorted(archive_paths, key=sorter)

        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname

    def skip_entry(self, arcname):
        """
        Determine whether an archive entry should be skipped when verifying
        or installing.
        """
        # The signature file won't be in RECORD,
        # and we don't currently do anything with it.
        # We also skip directories, as they won't be in RECORD
        # either. See:
        #
        # https://github.com/pypa/wheel/issues/294
        # https://github.com/pypa/wheel/issues/287
        # https://github.com/pypa/wheel/pull/289
        #
        return arcname.endswith(('/', '/RECORD.jws'))

    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
        bytecode will try to use file-hash based invalidation (PEP-552) on
        supported interpreter versions (CPython 3.7+).

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)
        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']

            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True  # so we can rollback if needed

            bc = not sys.dont_write_bytecode  # Double negatives. Lovely!

            outfiles = []  # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    if self.skip_entry(u_arcname):
                        continue
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx) and not u_arcname.endswith('.exe'))

                    if u_arcname.startswith(data_pfx):
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        # Issue #147: permission bits aren't preserved. Using
                        # zf.extract(zinfo, libdir) should have worked, but didn't,
                        # see https://www.thetopsites.net/article/53834422.shtml
                        # So ... manually preserve permission bits as given in zinfo
                        if os.name == 'posix':
                            # just set the normal permission bits
                            os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException('digest mismatch '
                                                           'on write for '
                                                           '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile, hashed_invalidation=bc_hashed_invalidation)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed', exc_info=True)
                    else:
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' [%s]' % ','.join(v.flags)
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {'gui': True}
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)  # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'], dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)

    def _get_dylib_cache(self):
        global cache
        if cache is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('dylib-cache'), '%s.%s' % sys.version_info[:2])
            cache = Cache(base)
        return cache

    def _get_extensions(self):
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        arcname = posixpath.join(info_dir, 'EXTENSIONS')
        wrapper = codecs.getreader('utf-8')
        result = []
        with ZipFile(pathname, 'r') as zf:
            try:
                with zf.open(arcname) as bf:
                    wf = wrapper(bf)
                    extensions = json.load(wf)
                    cache = self._get_dylib_cache()
                    prefix = cache.prefix_to_dir(self.filename, use_abspath=False)
                    cache_base = os.path.join(cache.base, prefix)
                    if not os.path.isdir(cache_base):
                        os.makedirs(cache_base)
                    for name, relpath in extensions.items():
                        dest = os.path.join(cache_base, convert_path(relpath))
                        if not os.path.exists(dest):
                            extract = True
                        else:
                            file_time = os.stat(dest).st_mtime
                            file_time = datetime.datetime.fromtimestamp(file_time)
                            info = zf.getinfo(relpath)
                            wheel_time = datetime.datetime(*info.date_time)
                            extract = wheel_time > file_time
                        if extract:
                            zf.extract(relpath, cache_base)
                        result.append((name, dest))
            except KeyError:
                pass
        return result

    def is_compatible(self):
        """
        Determine if a wheel is compatible with the running system.
        """
        return is_compatible(self)

    def is_mountable(self):
        """
        Determine if a wheel is asserted as mountable by its metadata.
        """
        return True  # for now - metadata details TBD

    def mount(self, append=False):
        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
        if not self.is_compatible():
            msg = 'Wheel %s not compatible with this Python.' % pathname
            raise DistlibException(msg)
        if not self.is_mountable():
            msg = 'Wheel %s is marked as not mountable.' % pathname
            raise DistlibException(msg)
        if pathname in sys.path:
            logger.debug('%s already in path', pathname)
        else:
            if append:
                sys.path.append(pathname)
            else:
                sys.path.insert(0, pathname)
            extensions = self._get_extensions()
            if extensions:
                if _hook not in sys.meta_path:
                    sys.meta_path.append(_hook)
                _hook.add(pathname, extensions)

    def unmount(self):
        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
        if pathname not in sys.path:
            logger.debug('%s not in path', pathname)
        else:
            sys.path.remove(pathname)
            if pathname in _hook.impure_wheels:
                _hook.remove(pathname)
            if not _hook.impure_wheels:
                if _hook in sys.meta_path:
                    sys.meta_path.remove(_hook)

    def verify(self):
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        # data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message_from_file(wf)
            # wv = message['Wheel-Version'].split('.', 1)
            # file_version = tuple([int(i) for i in wv])
            # TODO version verification

            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # See issue #115: some wheels have .. in their entries, but
                # in the filename ... e.g. __main__..py ! So the check is
                # updated to look for .. in the directory portions
                p = u_arcname.split('/')
                if '..' in p:
                    raise DistlibException('invalid entry in '
                                           'wheel: %r' % u_arcname)

                if self.skip_entry(u_arcname):
                    continue
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)

    def update(self, modifier, dest_dir=None, **kwargs):
        """
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents of the corresponding archive entries can be found.
        The modifier is free to change the contents of the files pointed to,
        add new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        """

        def get_version(path_map, info_dir):
            version = path = None
            key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
            if key not in path_map:
                key = '%s/PKG-INFO' % info_dir
            if key in path_map:
                path = path_map[key]
                version = Metadata(path=path).version
            return version, path

        def update_version(version, path):
            updated = None
            try:
                NormalizedVersion(version)
                i = version.find('-')
                if i < 0:
                    updated = '%s+1' % version
                else:
                    parts = [int(s) for s in version[i + 1:].split('.')]
                    parts[-1] += 1
                    updated = '%s+%s' % (version[:i], '.'.join(str(i) for i in parts))
            except UnsupportedVersionError:
                logger.debug('Cannot update non-compliant (PEP-440) '
                             'version %r', version)
            if updated:
                md = Metadata(path=path)
                md.version = updated
                legacy = path.endswith(LEGACY_METADATA_FILENAME)
                md.write(path=path, legacy=legacy)
                logger.debug('Version updated from %r to %r', version, updated)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        record_name = posixpath.join(info_dir, 'RECORD')
        with tempdir() as workdir:
            with ZipFile(pathname, 'r') as zf:
                path_map = {}
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    if u_arcname == record_name:
                        continue
                    if '..' in u_arcname:
                        raise DistlibException('invalid entry in '
                                               'wheel: %r' % u_arcname)
                    zf.extract(zinfo, workdir)
                    path = os.path.join(workdir, convert_path(u_arcname))
                    path_map[u_arcname] = path

            # Remember the version.
            original_version, _ = get_version(path_map, info_dir)
            # Files extracted. Call the modifier.
            modified = modifier(path_map, **kwargs)
            if modified:
                # Something changed - need to build a new wheel.
                current_version, path = get_version(path_map, info_dir)
                if current_version and (current_version == original_version):
                    # Add or update local version to signify changes.
                    update_version(current_version, path)
                # Decide where the new wheel goes.
                if dest_dir is None:
                    fd, newpath = tempfile.mkstemp(suffix='.whl', prefix='wheel-update-', dir=workdir)
                    os.close(fd)
                else:
                    if not os.path.isdir(dest_dir):
                        raise DistlibException('Not a directory: %r' % dest_dir)
                    newpath = os.path.join(dest_dir, self.filename)
                archive_paths = list(path_map.items())
                distinfo = os.path.join(workdir, info_dir)
                info = distinfo, info_dir
                self.write_records(info, workdir, archive_paths)
                self.build_zip(newpath, archive_paths)
                if dest_dir is None:
                    shutil.copyfile(newpath, pathname)
        return modified


def _get_glibc_version():
    import platform
    ver = platform.libc_ver()
    result = []
    if ver[0] == 'glibc':
        for s in ver[1].split('.'):
            result.append(int(s) if s.isdigit() else 0)
        result = tuple(result)
    return result


def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """

    class _Version:

        def __init__(self, major, minor):
            self.major = major
            self.major_minor = (major, minor)
            self.string = ''.join((str(major), str(minor)))

        def __str__(self):
            return self.string

    versions = [
        _Version(sys.version_info.major, minor_version)
        for minor_version in range(sys.version_info.minor, -1, -1)
    ]
    abis = []
    for suffix in _get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')
    result = []

    arches = [ARCH]
    if sys.platform == 'darwin':
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, major, minor, arch = m.groups()
            minor = int(minor)
            matches = [arch]
            if arch in ('i386', 'ppc'):
                matches.append('fat')
            if arch in ('i386', 'ppc', 'x86_64'):
                matches.append('fat3')
            if arch in ('ppc64', 'x86_64'):
                matches.append('fat64')
            if arch in ('i386', 'x86_64'):
                matches.append('intel')
            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                matches.append('universal')
            while minor >= 0:
                for match in matches:
                    s = '%s_%s_%s_%s' % (name, major, minor, match)
                    if s != ARCH:  # already there
                        arches.append(s)
                minor -= 1

    # Most specific - our Python version, ABI and arch
    for i, version_object in enumerate(versions):
        version = str(version_object)
        add_abis = []

        if i == 0:
            add_abis = abis

        if IMP_PREFIX == 'cp' and version_object.major_minor >= (3, 2):
            limited_api_abi = 'abi' + str(version_object.major)
            if limited_api_abi not in add_abis:
                add_abis.append(limited_api_abi)

        for abi in add_abis:
            for arch in arches:
                result.append((''.join((IMP_PREFIX, version)), abi, arch))
                # manylinux
                if abi != 'none' and sys.platform.startswith('linux'):
                    arch = arch.replace('linux_', '')
                    parts = _get_glibc_version()
                    if len(parts) == 2:
                        if parts >= (2, 5):
                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux1_%s' % arch))
                        if parts >= (2, 12):
                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2010_%s' % arch))
                        if parts >= (2, 17):
                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2014_%s' % arch))
                        result.append((''.join(
                            (IMP_PREFIX, version)), abi, 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch)))

    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version_object in enumerate(versions):
        version = str(version_object)
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))

    # no IMP_PREFIX, ABI or arch dependency
    for i, version_object in enumerate(versions):
        version = str(version_object)
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))

    return set(result)


COMPATIBLE_TAGS = compatible_tags()

del compatible_tags


def is_compatible(wheel, tags=None):
    if not isinstance(wheel, Wheel):
        wheel = Wheel(wheel)  # assume it's a filename
    result = False
    if tags is None:
        tags = COMPATIBLE_TAGS
    for ver, abi, arch in tags:
        if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
            result = True
            break
    return result
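
The Wheel class above is usable without ever opening an archive: filename parsing, tag expansion and compatibility checking work from the name alone. Here is a minimal sketch (not part of the vendored file), assuming the vendored import path; the wheel filename is illustrative and no such file needs to exist for these calls.

from pip._vendor.distlib.wheel import Wheel, is_compatible

w = Wheel('requests-2.31.0-py3-none-any.whl')
print(w.name, w.version)  # requests 2.31.0
print(list(w.tags))       # [('py3', 'none', 'any')]
print(is_compatible(w))   # True on CPython 3: py3-none-any is in COMPATIBLE_TAGS

Building (w.build(paths)) and installing (w.install(paths, maker)) do touch the filesystem: they additionally need a paths mapping ('purelib'/'platlib', 'scripts', 'prefix' and so on) and, for installing, a script maker such as distlib's ScriptMaker.
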
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc
ADDED
Binary file (581 Bytes).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_elffile.cpython-311.pyc
ADDED
Binary file (5.48 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc
ADDED
Binary file (10.9 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc
ADDED
Binary file (3.71 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_tokenizer.cpython-311.pyc
ADDED
Binary file (8.57 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/metadata.cpython-311.pyc
ADDED
Binary file (28.6 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc
ADDED
Binary file (4.75 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc
ADDED
Binary file (41.3 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc
ADDED
Binary file (26.2 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc
ADDED
Binary file (8.33 kB).

tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc
ADDED
Binary file (20.9 kB).
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/_manylinux.py
ADDED
@@ -0,0 +1,262 @@
from __future__ import annotations

import collections
import contextlib
import functools
import os
import re
import sys
import warnings
from typing import Generator, Iterator, NamedTuple, Sequence

from ._elffile import EIClass, EIData, ELFFile, EMachine

EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400


# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        yield None


def _is_linux_armhf(executable: str) -> bool:
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.Arm
            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        )


def _is_linux_i686(executable: str) -> bool:
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.I386
        )


def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
    if "armv7l" in archs:
        return _is_linux_armhf(executable)
    if "i686" in archs:
        return _is_linux_i686(executable)
    allowed_archs = {
        "x86_64",
        "aarch64",
        "ppc64",
        "ppc64le",
        "s390x",
        "loongarch64",
        "riscv64",
    }
    return any(arch in allowed_archs for arch in archs)


# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)


class _GLibCVersion(NamedTuple):
    major: int
    minor: int


def _glibc_version_string_confstr() -> str | None:
    """
    Primary implementation of glibc_version_string using os.confstr.
    """
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
    try:
        # Should be a string like "glibc 2.17".
        version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
        assert version_string is not None
        _, version = version_string.rsplit()
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version


def _glibc_version_string_ctypes() -> str | None:
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


def _glibc_version_string() -> str | None:
    """Returns glibc version string, or None if not using glibc."""
    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()


def _parse_glibc_version(version_str: str) -> tuple[int, int]:
    """Parse glibc version.

    We use a regexp instead of str.split because we want to discard any
    random junk that might come after the minor version -- this might happen
    in patched/forked versions of glibc (e.g. Linaro's version of glibc
    uses version strings like "2.20-2014.11"). See gh-3588.
    """
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn(
            f"Expected glibc version with 2 components major.minor,"
            f" got: {version_str}",
            RuntimeWarning,
        )
        return -1, -1
    return int(m.group("major")), int(m.group("minor"))


@functools.lru_cache
def _get_glibc_version() -> tuple[int, int]:
    version_str = _glibc_version_string()
    if version_str is None:
        return (-1, -1)
    return _parse_glibc_version(version_str)


# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux
    except ImportError:
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True


_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}


def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.

    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    too_old_glibc2 = _GLibCVersion(2, 16)
    if set(archs) & {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for arch in archs:
        for glibc_max in glibc_max_list:
            if glibc_max.major == too_old_glibc2.major:
                min_minor = too_old_glibc2.minor
            else:
                # For other glibc major versions oldest supported is (x, 0).
                min_minor = -1
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
                tag = "manylinux_{}_{}".format(*glibc_version)
                if _is_compatible(arch, glibc_version):
                    yield f"{tag}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                if glibc_version in _LEGACY_MANYLINUX_MAP:
                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                    if _is_compatible(arch, glibc_version):
                        yield f"{legacy_tag}_{arch}"
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/specifiers.py
ADDED
@@ -0,0 +1,1009 @@
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from pip._vendor.packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
|
| 8 |
+
from pip._vendor.packaging.version import Version
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import abc
|
| 14 |
+
import itertools
|
| 15 |
+
import re
|
| 16 |
+
from typing import Callable, Iterable, Iterator, TypeVar, Union
|
| 17 |
+
|
| 18 |
+
from .utils import canonicalize_version
|
| 19 |
+
from .version import Version
|
| 20 |
+
|
| 21 |
+
UnparsedVersion = Union[Version, str]
|
| 22 |
+
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
|
| 23 |
+
CallableOperator = Callable[[Version, str], bool]
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _coerce_version(version: UnparsedVersion) -> Version:
|
| 27 |
+
if not isinstance(version, Version):
|
| 28 |
+
version = Version(version)
|
| 29 |
+
return version
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class InvalidSpecifier(ValueError):
|
| 33 |
+
"""
|
| 34 |
+
Raised when attempting to create a :class:`Specifier` with a specifier
|
| 35 |
+
string that is invalid.
|
| 36 |
+
|
| 37 |
+
>>> Specifier("lolwat")
|
| 38 |
+
Traceback (most recent call last):
|
| 39 |
+
...
|
| 40 |
+
packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
|
| 45 |
+
@abc.abstractmethod
|
| 46 |
+
def __str__(self) -> str:
|
| 47 |
+
"""
|
| 48 |
+
Returns the str representation of this Specifier-like object. This
|
| 49 |
+
should be representative of the Specifier itself.
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
@abc.abstractmethod
|
| 53 |
+
def __hash__(self) -> int:
|
| 54 |
+
"""
|
| 55 |
+
Returns a hash value for this Specifier-like object.
|
| 56 |
+
"""
|
| 57 |
+
|
| 58 |
+
@abc.abstractmethod
|
| 59 |
+
def __eq__(self, other: object) -> bool:
|
| 60 |
+
"""
|
| 61 |
+
Returns a boolean representing whether or not the two Specifier-like
|
| 62 |
+
objects are equal.
|
| 63 |
+
|
| 64 |
+
:param other: The other object to check against.
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
@property
|
| 68 |
+
@abc.abstractmethod
|
| 69 |
+
def prereleases(self) -> bool | None:
|
| 70 |
+
"""Whether or not pre-releases as a whole are allowed.
|
| 71 |
+
|
| 72 |
+
This can be set to either ``True`` or ``False`` to explicitly enable or disable
|
| 73 |
+
prereleases or it can be set to ``None`` (the default) to use default semantics.
|
| 74 |
+
"""
|
| 75 |
+
|
| 76 |
+
@prereleases.setter
|
| 77 |
+
def prereleases(self, value: bool) -> None:
|
| 78 |
+
"""Setter for :attr:`prereleases`.
|
| 79 |
+
|
| 80 |
+
:param value: The value to set.
|
| 81 |
+
"""
|
| 82 |
+
|
| 83 |
+
@abc.abstractmethod
|
| 84 |
+
def contains(self, item: str, prereleases: bool | None = None) -> bool:
|
| 85 |
+
"""
|
| 86 |
+
Determines if the given item is contained within this specifier.
|
| 87 |
+
"""
|
| 88 |
+
|
| 89 |
+
@abc.abstractmethod
|
| 90 |
+
def filter(
|
| 91 |
+
self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
|
| 92 |
+
) -> Iterator[UnparsedVersionVar]:
|
| 93 |
+
"""
|
| 94 |
+
Takes an iterable of items and filters them so that only items which
|
| 95 |
+
are contained within this specifier are allowed in it.
|
| 96 |
+
"""
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class Specifier(BaseSpecifier):
|
| 100 |
+
"""This class abstracts handling of version specifiers.
|
| 101 |
+
|
| 102 |
+
.. tip::
|
| 103 |
+
|
| 104 |
+
It is generally not required to instantiate this manually. You should instead
|
| 105 |
+
prefer to work with :class:`SpecifierSet` instead, which can parse
|
| 106 |
+
comma-separated version specifiers (which is what package metadata contains).
|
| 107 |
+
"""
|
| 108 |
+
|
| 109 |
+
_operator_regex_str = r"""
|
| 110 |
+
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
| 111 |
+
"""
|
| 112 |
+
_version_regex_str = r"""
|
| 113 |
+
(?P<version>
|
| 114 |
+
(?:
|
| 115 |
+
# The identity operators allow for an escape hatch that will
|
| 116 |
+
# do an exact string match of the version you wish to install.
|
| 117 |
+
# This will not be parsed by PEP 440 and we cannot determine
|
| 118 |
+
# any semantic meaning from it. This operator is discouraged
|
| 119 |
+
# but included entirely as an escape hatch.
|
| 120 |
+
(?<====) # Only match for the identity operator
|
| 121 |
+
\s*
|
| 122 |
+
[^\s;)]* # The arbitrary version can be just about anything,
|
| 123 |
+
# we match everything except for whitespace, a
|
| 124 |
+
# semi-colon for marker support, and a closing paren
|
| 125 |
+
# since versions can be enclosed in them.
|
| 126 |
+
)
|
| 127 |
+
|
|
| 128 |
+
(?:
|
| 129 |
+
# The (non)equality operators allow for wild card and local
|
| 130 |
+
# versions to be specified so we have to define these two
|
| 131 |
+
# operators separately to enable that.
|
| 132 |
+
(?<===|!=) # Only match for equals and not equals
|
| 133 |
+
|
| 134 |
+
\s*
|
| 135 |
+
v?
|
| 136 |
+
(?:[0-9]+!)? # epoch
|
| 137 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 138 |
+
|
| 139 |
+
# You cannot use a wild card and a pre-release, post-release, a dev or
|
| 140 |
+
# local version together so group them with a | and make them optional.
|
| 141 |
+
(?:
|
| 142 |
+
\.\* # Wild card syntax of .*
|
| 143 |
+
|
|
| 144 |
+
(?: # pre release
|
| 145 |
+
[-_\.]?
|
| 146 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 147 |
+
[-_\.]?
|
| 148 |
+
[0-9]*
|
| 149 |
+
)?
|
| 150 |
+
(?: # post release
|
| 151 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 152 |
+
)?
|
| 153 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 154 |
+
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
| 155 |
+
)?
|
| 156 |
+
)
|
| 157 |
+
|
|
| 158 |
+
(?:
|
| 159 |
+
# The compatible operator requires at least two digits in the
|
| 160 |
+
# release segment.
|
| 161 |
+
(?<=~=) # Only match for the compatible operator
|
| 162 |
+
|
| 163 |
+
\s*
|
| 164 |
+
v?
|
| 165 |
+
(?:[0-9]+!)? # epoch
|
| 166 |
+
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
|
| 167 |
+
(?: # pre release
|
| 168 |
+
[-_\.]?
|
| 169 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 170 |
+
[-_\.]?
|
| 171 |
+
[0-9]*
|
| 172 |
+
)?
|
| 173 |
+
(?: # post release
|
| 174 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 175 |
+
)?
|
| 176 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 177 |
+
)
|
| 178 |
+
|
|
| 179 |
+
(?:
|
| 180 |
+
# All other operators only allow a sub set of what the
|
| 181 |
+
# (non)equality operators do. Specifically they do not allow
|
| 182 |
+
# local versions to be specified nor do they allow the prefix
|
| 183 |
+
# matching wild cards.
|
| 184 |
+
(?<!==|!=|~=) # We have special cases for these
|
| 185 |
+
# operators so we want to make sure they
|
| 186 |
+
# don't match here.
|
| 187 |
+
|
| 188 |
+
\s*
|
| 189 |
+
v?
|
| 190 |
+
(?:[0-9]+!)? # epoch
|
| 191 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 192 |
+
(?: # pre release
|
| 193 |
+
[-_\.]?
|
| 194 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 195 |
+
[-_\.]?
|
| 196 |
+
[0-9]*
|
| 197 |
+
)?
|
| 198 |
+
(?: # post release
|
| 199 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 200 |
+
)?
|
| 201 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 202 |
+
)
|
| 203 |
+
)
|
| 204 |
+
"""
|
| 205 |
+
|
| 206 |
+
_regex = re.compile(
|
| 207 |
+
r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
|
| 208 |
+
re.VERBOSE | re.IGNORECASE,
|
| 209 |
+
)
|
| 210 |
+
|
| 211 |
+
_operators = {
|
| 212 |
+
"~=": "compatible",
|
| 213 |
+
"==": "equal",
|
| 214 |
+
"!=": "not_equal",
|
| 215 |
+
"<=": "less_than_equal",
|
| 216 |
+
">=": "greater_than_equal",
|
| 217 |
+
"<": "less_than",
|
| 218 |
+
">": "greater_than",
|
| 219 |
+
"===": "arbitrary",
|
| 220 |
+
}
|
| 221 |
+
|
| 222 |
+
def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
|
| 223 |
+
"""Initialize a Specifier instance.
|
| 224 |
+
|
| 225 |
+
:param spec:
|
| 226 |
+
The string representation of a specifier which will be parsed and
|
| 227 |
+
normalized before use.
|
| 228 |
+
:param prereleases:
|
| 229 |
+
This tells the specifier if it should accept prerelease versions if
|
| 230 |
+
applicable or not. The default of ``None`` will autodetect it from the
|
| 231 |
+
given specifiers.
|
| 232 |
+
:raises InvalidSpecifier:
|
| 233 |
+
If the given specifier is invalid (i.e. bad syntax).
|
| 234 |
+
"""
|
| 235 |
+
match = self._regex.search(spec)
|
| 236 |
+
if not match:
|
| 237 |
+
raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
|
| 238 |
+
|
| 239 |
+
self._spec: tuple[str, str] = (
|
| 240 |
+
match.group("operator").strip(),
|
| 241 |
+
match.group("version").strip(),
|
| 242 |
+
)
|
| 243 |
+
|
| 244 |
+
# Store whether or not this Specifier should accept prereleases
|
| 245 |
+
self._prereleases = prereleases
|
| 246 |
+
|
| 247 |
+
# https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
|
| 248 |
+
@property # type: ignore[override]
|
| 249 |
+
def prereleases(self) -> bool:
|
| 250 |
+
# If there is an explicit prereleases set for this, then we'll just
|
| 251 |
+
# blindly use that.
|
| 252 |
+
if self._prereleases is not None:
|
| 253 |
+
return self._prereleases
|
| 254 |
+
|
| 255 |
+
# Look at all of our specifiers and determine if they are inclusive
|
| 256 |
+
# operators, and if they are if they are including an explicit
|
| 257 |
+
# prerelease.
|
| 258 |
+
operator, version = self._spec
|
| 259 |
+
if operator in ["==", ">=", "<=", "~=", "==="]:
|
| 260 |
+
# The == specifier can include a trailing .*, if it does we
|
| 261 |
+
# want to remove before parsing.
|
| 262 |
+
if operator == "==" and version.endswith(".*"):
|
| 263 |
+
version = version[:-2]
|
| 264 |
+
|
| 265 |
+
# Parse the version, and if it is a pre-release than this
|
| 266 |
+
# specifier allows pre-releases.
|
| 267 |
+
if Version(version).is_prerelease:
|
| 268 |
+
return True
|
| 269 |
+
|
| 270 |
+
return False
|
| 271 |
+
|
| 272 |
+
@prereleases.setter
|
| 273 |
+
def prereleases(self, value: bool) -> None:
|
| 274 |
+
self._prereleases = value
|
| 275 |
+
|
| 276 |
+
@property
|
| 277 |
+
def operator(self) -> str:
|
| 278 |
+
"""The operator of this specifier.
|
| 279 |
+
|
| 280 |
+
>>> Specifier("==1.2.3").operator
|
| 281 |
+
'=='
|
| 282 |
+
"""
|
| 283 |
+
return self._spec[0]
|
| 284 |
+
|
| 285 |
+
@property
|
| 286 |
+
def version(self) -> str:
|
| 287 |
+
"""The version of this specifier.
|
| 288 |
+
|
| 289 |
+
>>> Specifier("==1.2.3").version
|
| 290 |
+
'1.2.3'
|
| 291 |
+
"""
|
| 292 |
+
return self._spec[1]
|
| 293 |
+
|
| 294 |
+
def __repr__(self) -> str:
|
| 295 |
+
"""A representation of the Specifier that shows all internal state.
|
| 296 |
+
|
| 297 |
+
>>> Specifier('>=1.0.0')
|
| 298 |
+
<Specifier('>=1.0.0')>
|
| 299 |
+
>>> Specifier('>=1.0.0', prereleases=False)
|
| 300 |
+
<Specifier('>=1.0.0', prereleases=False)>
|
| 301 |
+
>>> Specifier('>=1.0.0', prereleases=True)
|
| 302 |
+
<Specifier('>=1.0.0', prereleases=True)>
|
| 303 |
+
"""
|
| 304 |
+
pre = (
|
| 305 |
+
f", prereleases={self.prereleases!r}"
|
| 306 |
+
if self._prereleases is not None
|
| 307 |
+
else ""
|
| 308 |
+
)
|
| 309 |
+
|
| 310 |
+
return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
|
| 311 |
+
|
| 312 |
+
def __str__(self) -> str:
|
| 313 |
+
"""A string representation of the Specifier that can be round-tripped.
|
| 314 |
+
|
| 315 |
+
>>> str(Specifier('>=1.0.0'))
|
| 316 |
+
'>=1.0.0'
|
| 317 |
+
>>> str(Specifier('>=1.0.0', prereleases=False))
|
| 318 |
+
'>=1.0.0'
|
| 319 |
+
"""
|
| 320 |
+
return "{}{}".format(*self._spec)
|
| 321 |
+
|
| 322 |
+
@property
|
| 323 |
+
def _canonical_spec(self) -> tuple[str, str]:
|
| 324 |
+
canonical_version = canonicalize_version(
|
| 325 |
+
self._spec[1],
|
| 326 |
+
strip_trailing_zero=(self._spec[0] != "~="),
|
| 327 |
+
)
|
| 328 |
+
return self._spec[0], canonical_version
|
| 329 |
+
|
| 330 |
+
def __hash__(self) -> int:
|
| 331 |
+
return hash(self._canonical_spec)
|
| 332 |
+
|
| 333 |
+
def __eq__(self, other: object) -> bool:
|
| 334 |
+
"""Whether or not the two Specifier-like objects are equal.
|
| 335 |
+
|
| 336 |
+
:param other: The other object to check against.
|
| 337 |
+
|
| 338 |
+
The value of :attr:`prereleases` is ignored.
|
| 339 |
+
|
| 340 |
+
>>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
|
| 341 |
+
True
|
| 342 |
+
>>> (Specifier("==1.2.3", prereleases=False) ==
|
| 343 |
+
... Specifier("==1.2.3", prereleases=True))
|
| 344 |
+
True
|
| 345 |
+
>>> Specifier("==1.2.3") == "==1.2.3"
|
| 346 |
+
True
|
| 347 |
+
>>> Specifier("==1.2.3") == Specifier("==1.2.4")
|
| 348 |
+
False
|
| 349 |
+
>>> Specifier("==1.2.3") == Specifier("~=1.2.3")
|
| 350 |
+
False
|
| 351 |
+
"""
|
| 352 |
+
if isinstance(other, str):
|
| 353 |
+
try:
|
| 354 |
+
other = self.__class__(str(other))
|
| 355 |
+
except InvalidSpecifier:
|
| 356 |
+
return NotImplemented
|
| 357 |
+
elif not isinstance(other, self.__class__):
|
| 358 |
+
return NotImplemented
|
| 359 |
+
|
| 360 |
+
return self._canonical_spec == other._canonical_spec
|
| 361 |
+
|
| 362 |
+
def _get_operator(self, op: str) -> CallableOperator:
|
| 363 |
+
operator_callable: CallableOperator = getattr(
|
| 364 |
+
self, f"_compare_{self._operators[op]}"
|
| 365 |
+
)
|
| 366 |
+
return operator_callable
|
| 367 |
+
|
| 368 |
+
def _compare_compatible(self, prospective: Version, spec: str) -> bool:
|
| 369 |
+
# Compatible releases have an equivalent combination of >= and ==. That
|
| 370 |
+
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
|
| 371 |
+
# implement this in terms of the other specifiers instead of
|
| 372 |
+
# implementing it ourselves. The only thing we need to do is construct
|
| 373 |
+
# the other specifiers.
|
| 374 |
+
|
| 375 |
+
# We want everything but the last item in the version, but we want to
|
| 376 |
+
# ignore suffix segments.
|
| 377 |
+
prefix = _version_join(
|
| 378 |
+
list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
|
| 379 |
+
)
|
| 380 |
+
|
| 381 |
+
# Add the prefix notation to the end of our string
|
| 382 |
+
prefix += ".*"
|
| 383 |
+
|
| 384 |
+
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
|
| 385 |
+
prospective, prefix
|
| 386 |
+
)
|
| 387 |
+
|
| 388 |
+
def _compare_equal(self, prospective: Version, spec: str) -> bool:
|
| 389 |
+
# We need special logic to handle prefix matching
|
| 390 |
+
if spec.endswith(".*"):
|
| 391 |
+
# In the case of prefix matching we want to ignore local segment.
|
| 392 |
+
normalized_prospective = canonicalize_version(
|
| 393 |
+
prospective.public, strip_trailing_zero=False
|
| 394 |
+
)
|
| 395 |
+
# Get the normalized version string ignoring the trailing .*
|
| 396 |
+
normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
|
| 397 |
+
# Split the spec out by bangs and dots, and pretend that there is
|
| 398 |
+
# an implicit dot in between a release segment and a pre-release segment.
|
| 399 |
+
split_spec = _version_split(normalized_spec)
|
| 400 |
+
|
| 401 |
+
# Split the prospective version out by bangs and dots, and pretend
|
| 402 |
+
# that there is an implicit dot in between a release segment and
|
| 403 |
+
# a pre-release segment.
|
| 404 |
+
split_prospective = _version_split(normalized_prospective)
|
| 405 |
+
|
| 406 |
+
# 0-pad the prospective version before shortening it to get the correct
|
| 407 |
+
# shortened version.
|
| 408 |
+
padded_prospective, _ = _pad_version(split_prospective, split_spec)
|
| 409 |
+
|
| 410 |
+
# Shorten the prospective version to be the same length as the spec
|
| 411 |
+
# so that we can determine if the specifier is a prefix of the
|
| 412 |
+
# prospective version or not.
|
| 413 |
+
shortened_prospective = padded_prospective[: len(split_spec)]
|
| 414 |
+
|
| 415 |
+
return shortened_prospective == split_spec
|
| 416 |
+
else:
|
| 417 |
+
# Convert our spec string into a Version
|
| 418 |
+
spec_version = Version(spec)
|
| 419 |
+
|
| 420 |
+
# If the specifier does not have a local segment, then we want to
|
| 421 |
+
# act as if the prospective version also does not have a local
|
| 422 |
+
# segment.
|
| 423 |
+
if not spec_version.local:
|
| 424 |
+
prospective = Version(prospective.public)
|
| 425 |
+
|
| 426 |
+
return prospective == spec_version
|
| 427 |
+
|
| 428 |
+
def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
|
| 429 |
+
return not self._compare_equal(prospective, spec)
|
| 430 |
+
|
| 431 |
+
def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
|
| 432 |
+
# NB: Local version identifiers are NOT permitted in the version
|
| 433 |
+
# specifier, so local version labels can be universally removed from
|
| 434 |
+
# the prospective version.
|
| 435 |
+
return Version(prospective.public) <= Version(spec)
|
| 436 |
+
|
| 437 |
+
def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
|
| 438 |
+
# NB: Local version identifiers are NOT permitted in the version
|
| 439 |
+
# specifier, so local version labels can be universally removed from
|
| 440 |
+
# the prospective version.
|
| 441 |
+
return Version(prospective.public) >= Version(spec)
|
| 442 |
+
|
| 443 |
+
def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
|
| 444 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
| 445 |
+
# it as a version.
|
| 446 |
+
spec = Version(spec_str)
|
| 447 |
+
|
| 448 |
+
# Check to see if the prospective version is less than the spec
|
| 449 |
+
# version. If it's not we can short circuit and just return False now
|
| 450 |
+
# instead of doing extra unneeded work.
|
| 451 |
+
if not prospective < spec:
|
| 452 |
+
return False
|
| 453 |
+
|
| 454 |
+
# This special case is here so that, unless the specifier itself
|
| 455 |
+
# includes is a pre-release version, that we do not accept pre-release
|
| 456 |
+
# versions for the version mentioned in the specifier (e.g. <3.1 should
|
| 457 |
+
# not match 3.1.dev0, but should match 3.0.dev0).
|
| 458 |
+
if not spec.is_prerelease and prospective.is_prerelease:
|
| 459 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 460 |
+
return False
|
| 461 |
+
|
| 462 |
+
# If we've gotten to here, it means that prospective version is both
|
| 463 |
+
# less than the spec version *and* it's not a pre-release of the same
|
| 464 |
+
# version in the spec.
|
| 465 |
+
return True
|
| 466 |
+
|
| 467 |
+
def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
|
| 468 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
| 469 |
+
# it as a version.
|
| 470 |
+
spec = Version(spec_str)
|
| 471 |
+
|
| 472 |
+
# Check to see if the prospective version is greater than the spec
|
| 473 |
+
# version. If it's not we can short circuit and just return False now
|
| 474 |
+
# instead of doing extra unneeded work.
|
| 475 |
+
if not prospective > spec:
|
| 476 |
+
return False
|
| 477 |
+
|
| 478 |
+
# This special case is here so that, unless the specifier itself
|
| 479 |
+
# includes is a post-release version, that we do not accept
|
| 480 |
+
# post-release versions for the version mentioned in the specifier
|
| 481 |
+
# (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
|
| 482 |
+
if not spec.is_postrelease and prospective.is_postrelease:
|
| 483 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 484 |
+
return False
|
| 485 |
+
|
| 486 |
+
# Ensure that we do not allow a local version of the version mentioned
|
| 487 |
+
# in the specifier, which is technically greater than, to match.
|
| 488 |
+
if prospective.local is not None:
|
| 489 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 490 |
+
return False
|
| 491 |
+
|
| 492 |
+
# If we've gotten to here, it means that prospective version is both
|
| 493 |
+
# greater than the spec version *and* it's not a pre-release of the
|
| 494 |
+
# same version in the spec.
|
| 495 |
+
return True
|
| 496 |
+
|
| 497 |
+
def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
|
| 498 |
+
return str(prospective).lower() == str(spec).lower()
|
| 499 |
+
|
| 500 |
+
def __contains__(self, item: str | Version) -> bool:
|
| 501 |
+
"""Return whether or not the item is contained in this specifier.
|
| 502 |
+
|
| 503 |
+
:param item: The item to check for.
|
| 504 |
+
|
| 505 |
+
This is used for the ``in`` operator and behaves the same as
|
| 506 |
+
:meth:`contains` with no ``prereleases`` argument passed.
|
| 507 |
+
|
| 508 |
+
>>> "1.2.3" in Specifier(">=1.2.3")
|
| 509 |
+
True
|
| 510 |
+
>>> Version("1.2.3") in Specifier(">=1.2.3")
|
| 511 |
+
True
|
| 512 |
+
>>> "1.0.0" in Specifier(">=1.2.3")
|
| 513 |
+
False
|
| 514 |
+
>>> "1.3.0a1" in Specifier(">=1.2.3")
|
| 515 |
+
False
|
| 516 |
+
>>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
|
| 517 |
+
True
|
| 518 |
+
"""
|
| 519 |
+
return self.contains(item)
|
| 520 |
+
|
| 521 |
+
def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
|
| 522 |
+
"""Return whether or not the item is contained in this specifier.
|
| 523 |
+
|
| 524 |
+
:param item:
|
| 525 |
+
The item to check for, which can be a version string or a
|
| 526 |
+
:class:`Version` instance.
|
| 527 |
+
:param prereleases:
|
| 528 |
+
Whether or not to match prereleases with this Specifier. If set to
|
| 529 |
+
``None`` (the default), it uses :attr:`prereleases` to determine
|
| 530 |
+
whether or not prereleases are allowed.
|
| 531 |
+
|
| 532 |
+
>>> Specifier(">=1.2.3").contains("1.2.3")
|
| 533 |
+
True
|
| 534 |
+
>>> Specifier(">=1.2.3").contains(Version("1.2.3"))
|
| 535 |
+
True
|
| 536 |
+
>>> Specifier(">=1.2.3").contains("1.0.0")
|
| 537 |
+
False
|
| 538 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1")
|
| 539 |
+
False
|
| 540 |
+
>>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
|
| 541 |
+
True
|
| 542 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
|
| 543 |
+
True
|
| 544 |
+
"""
|
| 545 |
+
|
| 546 |
+
# Determine if prereleases are to be allowed or not.
|
| 547 |
+
if prereleases is None:
|
| 548 |
+
prereleases = self.prereleases
|
| 549 |
+
|
| 550 |
+
# Normalize item to a Version, this allows us to have a shortcut for
|
| 551 |
+
# "2.0" in Specifier(">=2")
|
| 552 |
+
normalized_item = _coerce_version(item)
|
| 553 |
+
|
| 554 |
+
# Determine if we should be supporting prereleases in this specifier
|
| 555 |
+
# or not, if we do not support prereleases than we can short circuit
|
| 556 |
+
# logic if this version is a prereleases.
|
| 557 |
+
if normalized_item.is_prerelease and not prereleases:
|
| 558 |
+
return False
|
| 559 |
+
|
| 560 |
+
# Actually do the comparison to determine if this item is contained
|
| 561 |
+
# within this Specifier or not.
|
| 562 |
+
operator_callable: CallableOperator = self._get_operator(self.operator)
|
| 563 |
+
return operator_callable(normalized_item, self.version)
|
| 564 |
+
|
| 565 |
+
def filter(
|
| 566 |
+
self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
|
| 567 |
+
) -> Iterator[UnparsedVersionVar]:
|
| 568 |
+
"""Filter items in the given iterable, that match the specifier.
|
| 569 |
+
|
| 570 |
+
:param iterable:
|
| 571 |
+
An iterable that can contain version strings and :class:`Version` instances.
|
| 572 |
+
The items in the iterable will be filtered according to the specifier.
|
| 573 |
+
:param prereleases:
|
| 574 |
+
Whether or not to allow prereleases in the returned iterator. If set to
|
| 575 |
+
``None`` (the default), it will be intelligently decide whether to allow
|
| 576 |
+
prereleases or not (based on the :attr:`prereleases` attribute, and
|
| 577 |
+
whether the only versions matching are prereleases).
|
| 578 |
+
|
| 579 |
+
This method is smarter than just ``filter(Specifier().contains, [...])``
|
| 580 |
+
because it implements the rule from :pep:`440` that a prerelease item
|
| 581 |
+
SHOULD be accepted if no other versions match the given specifier.
|
| 582 |
+
|
| 583 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
|
| 584 |
+
['1.3']
|
| 585 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
|
| 586 |
+
['1.2.3', '1.3', <Version('1.4')>]
|
| 587 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
|
| 588 |
+
['1.5a1']
|
| 589 |
+
>>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
|
| 590 |
+
['1.3', '1.5a1']
|
| 591 |
+
>>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
|
| 592 |
+
['1.3', '1.5a1']
|
| 593 |
+
"""
|
| 594 |
+
|
| 595 |
+
yielded = False
|
| 596 |
+
found_prereleases = []
|
| 597 |
+
|
| 598 |
+
kw = {"prereleases": prereleases if prereleases is not None else True}
|
| 599 |
+
|
| 600 |
+
# Attempt to iterate over all the values in the iterable and if any of
|
| 601 |
+
# them match, yield them.
|
| 602 |
+
for version in iterable:
|
| 603 |
+
parsed_version = _coerce_version(version)
|
| 604 |
+
|
| 605 |
+
if self.contains(parsed_version, **kw):
|
| 606 |
+
# If our version is a prerelease, and we were not set to allow
|
| 607 |
+
# prereleases, then we'll store it for later in case nothing
|
| 608 |
+
# else matches this specifier.
|
| 609 |
+
if parsed_version.is_prerelease and not (
|
| 610 |
+
prereleases or self.prereleases
|
| 611 |
+
):
|
| 612 |
+
found_prereleases.append(version)
|
| 613 |
+
# Either this is not a prerelease, or we should have been
|
| 614 |
+
# accepting prereleases from the beginning.
|
| 615 |
+
else:
|
| 616 |
+
yielded = True
|
| 617 |
+
yield version
|
| 618 |
+
|
| 619 |
+
# Now that we've iterated over everything, determine if we've yielded
|
| 620 |
+
# any values, and if we have not and we have any prereleases stored up
|
| 621 |
+
# then we will go ahead and yield the prereleases.
|
| 622 |
+
if not yielded and found_prereleases:
|
| 623 |
+
for version in found_prereleases:
|
| 624 |
+
yield version
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
| 628 |
+
|
| 629 |
+
|
| 630 |
+
def _version_split(version: str) -> list[str]:
|
| 631 |
+
"""Split version into components.
|
| 632 |
+
|
| 633 |
+
The split components are intended for version comparison. The logic does
|
| 634 |
+
not attempt to retain the original version string, so joining the
|
| 635 |
+
components back with :func:`_version_join` may not produce the original
|
| 636 |
+
version string.
|
| 637 |
+
"""
|
| 638 |
+
result: list[str] = []
|
| 639 |
+
|
| 640 |
+
epoch, _, rest = version.rpartition("!")
|
| 641 |
+
result.append(epoch or "0")
|
| 642 |
+
|
| 643 |
+
for item in rest.split("."):
|
| 644 |
+
match = _prefix_regex.search(item)
|
| 645 |
+
if match:
|
| 646 |
+
result.extend(match.groups())
|
| 647 |
+
else:
|
| 648 |
+
result.append(item)
|
| 649 |
+
return result
|
| 650 |
+
|
| 651 |
+
|
| 652 |
+
def _version_join(components: list[str]) -> str:
|
| 653 |
+
"""Join split version components into a version string.
|
| 654 |
+
|
| 655 |
+
This function assumes the input came from :func:`_version_split`, where the
|
| 656 |
+
first component must be the epoch (either empty or numeric), and all other
|
| 657 |
+
components numeric.
|
| 658 |
+
"""
|
| 659 |
+
epoch, *rest = components
|
| 660 |
+
return f"{epoch}!{'.'.join(rest)}"
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
def _is_not_suffix(segment: str) -> bool:
|
| 664 |
+
return not any(
|
| 665 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
| 666 |
+
)
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
|
| 670 |
+
left_split, right_split = [], []
|
| 671 |
+
|
| 672 |
+
# Get the release segment of our versions
|
| 673 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
| 674 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
| 675 |
+
|
| 676 |
+
# Get the rest of our versions
|
| 677 |
+
left_split.append(left[len(left_split[0]) :])
|
| 678 |
+
right_split.append(right[len(right_split[0]) :])
|
| 679 |
+
|
| 680 |
+
# Insert our padding
|
| 681 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
| 682 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
| 683 |
+
|
| 684 |
+
return (
|
| 685 |
+
list(itertools.chain.from_iterable(left_split)),
|
| 686 |
+
list(itertools.chain.from_iterable(right_split)),
|
| 687 |
+
)
|
| 688 |
+
|
| 689 |
+
|
| 690 |
+
class SpecifierSet(BaseSpecifier):
|
| 691 |
+
"""This class abstracts handling of a set of version specifiers.
|
| 692 |
+
|
| 693 |
+
It can be passed a single specifier (``>=3.0``), a comma-separated list of
|
| 694 |
+
specifiers (``>=3.0,!=3.1``), or no specifier at all.
|
| 695 |
+
"""
|
| 696 |
+
|
| 697 |
+
def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> None:
|
| 698 |
+
"""Initialize a SpecifierSet instance.
|
| 699 |
+
|
| 700 |
+
:param specifiers:
|
| 701 |
+
The string representation of a specifier or a comma-separated list of
|
| 702 |
+
specifiers which will be parsed and normalized before use.
|
| 703 |
+
:param prereleases:
|
| 704 |
+
This tells the SpecifierSet if it should accept prerelease versions if
|
| 705 |
+
applicable or not. The default of ``None`` will autodetect it from the
|
| 706 |
+
given specifiers.
|
| 707 |
+
|
| 708 |
+
:raises InvalidSpecifier:
|
| 709 |
+
If the given ``specifiers`` are not parseable than this exception will be
|
| 710 |
+
raised.
|
| 711 |
+
"""
|
| 712 |
+
|
| 713 |
+
# Split on `,` to break each individual specifier into it's own item, and
|
| 714 |
+
# strip each item to remove leading/trailing whitespace.
|
| 715 |
+
split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
|
| 716 |
+
|
| 717 |
+
# Make each individual specifier a Specifier and save in a frozen set for later.
|
| 718 |
+
self._specs = frozenset(map(Specifier, split_specifiers))
|
| 719 |
+
|
| 720 |
+
# Store our prereleases value so we can use it later to determine if
|
| 721 |
+
# we accept prereleases or not.
|
| 722 |
+
self._prereleases = prereleases
|
| 723 |
+
|
| 724 |
+
@property
|
| 725 |
+
def prereleases(self) -> bool | None:
|
| 726 |
+
# If we have been given an explicit prerelease modifier, then we'll
|
| 727 |
+
# pass that through here.
|
| 728 |
+
if self._prereleases is not None:
|
| 729 |
+
return self._prereleases
|
| 730 |
+
|
| 731 |
+
# If we don't have any specifiers, and we don't have a forced value,
|
| 732 |
+
# then we'll just return None since we don't know if this should have
|
| 733 |
+
# pre-releases or not.
|
| 734 |
+
if not self._specs:
|
| 735 |
+
return None
|
| 736 |
+
|
| 737 |
+
# Otherwise we'll see if any of the given specifiers accept
|
| 738 |
+
# prereleases, if any of them do we'll return True, otherwise False.
|
| 739 |
+
return any(s.prereleases for s in self._specs)
|
| 740 |
+
|
| 741 |
+
@prereleases.setter
|
| 742 |
+
def prereleases(self, value: bool) -> None:
|
| 743 |
+
self._prereleases = value
|
| 744 |
+
|
| 745 |
+
def __repr__(self) -> str:
|
| 746 |
+
"""A representation of the specifier set that shows all internal state.
|
| 747 |
+
|
| 748 |
+
Note that the ordering of the individual specifiers within the set may not
|
| 749 |
+
match the input string.
|
| 750 |
+
|
| 751 |
+
>>> SpecifierSet('>=1.0.0,!=2.0.0')
|
| 752 |
+
<SpecifierSet('!=2.0.0,>=1.0.0')>
|
| 753 |
+
>>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
|
| 754 |
+
<SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
|
| 755 |
+
>>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
|
| 756 |
+
<SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
|
| 757 |
+
"""
|
| 758 |
+
pre = (
|
| 759 |
+
f", prereleases={self.prereleases!r}"
|
| 760 |
+
if self._prereleases is not None
|
| 761 |
+
else ""
|
| 762 |
+
)
|
| 763 |
+
|
| 764 |
+
return f"<SpecifierSet({str(self)!r}{pre})>"
|
| 765 |
+
|
| 766 |
+
def __str__(self) -> str:
|
| 767 |
+
"""A string representation of the specifier set that can be round-tripped.
|
| 768 |
+
|
| 769 |
+
Note that the ordering of the individual specifiers within the set may not
|
| 770 |
+
match the input string.
|
| 771 |
+
|
| 772 |
+
>>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
|
| 773 |
+
'!=1.0.1,>=1.0.0'
|
| 774 |
+
>>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
|
| 775 |
+
'!=1.0.1,>=1.0.0'
|
| 776 |
+
"""
|
| 777 |
+
return ",".join(sorted(str(s) for s in self._specs))
|
| 778 |
+
|
| 779 |
+
def __hash__(self) -> int:
|
| 780 |
+
return hash(self._specs)
|
| 781 |
+
|
| 782 |
+
def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
|
| 783 |
+
"""Return a SpecifierSet which is a combination of the two sets.
|
| 784 |
+
|
| 785 |
+
:param other: The other object to combine with.
|
| 786 |
+
|
| 787 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
|
| 788 |
+
<SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
|
| 789 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
|
| 790 |
+
<SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
|
| 791 |
+
"""
|
| 792 |
+
if isinstance(other, str):
|
| 793 |
+
other = SpecifierSet(other)
|
| 794 |
+
elif not isinstance(other, SpecifierSet):
|
| 795 |
+
return NotImplemented
|
| 796 |
+
|
| 797 |
+
specifier = SpecifierSet()
|
| 798 |
+
specifier._specs = frozenset(self._specs | other._specs)
|
| 799 |
+
|
| 800 |
+
if self._prereleases is None and other._prereleases is not None:
|
| 801 |
+
specifier._prereleases = other._prereleases
|
| 802 |
+
elif self._prereleases is not None and other._prereleases is None:
|
| 803 |
+
specifier._prereleases = self._prereleases
|
| 804 |
+
elif self._prereleases == other._prereleases:
|
| 805 |
+
specifier._prereleases = self._prereleases
|
| 806 |
+
else:
|
| 807 |
+
raise ValueError(
|
| 808 |
+
"Cannot combine SpecifierSets with True and False prerelease "
|
| 809 |
+
"overrides."
|
| 810 |
+
)
|
| 811 |
+
|
| 812 |
+
return specifier
|
| 813 |
+
|
| 814 |
+
def __eq__(self, other: object) -> bool:
|
| 815 |
+
"""Whether or not the two SpecifierSet-like objects are equal.
|
| 816 |
+
|
| 817 |
+
:param other: The other object to check against.
|
| 818 |
+
|
| 819 |
+
The value of :attr:`prereleases` is ignored.
|
| 820 |
+
|
| 821 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
|
| 822 |
+
True
|
| 823 |
+
>>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
|
| 824 |
+
... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
|
| 825 |
+
True
|
| 826 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
|
| 827 |
+
True
|
| 828 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
|
| 829 |
+
False
|
| 830 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
|
| 831 |
+
False
|
| 832 |
+
"""
|
| 833 |
+
if isinstance(other, (str, Specifier)):
|
| 834 |
+
other = SpecifierSet(str(other))
|
| 835 |
+
elif not isinstance(other, SpecifierSet):
|
| 836 |
+
return NotImplemented
|
| 837 |
+
|
| 838 |
+
return self._specs == other._specs
|
| 839 |
+
|
| 840 |
+
def __len__(self) -> int:
|
| 841 |
+
"""Returns the number of specifiers in this specifier set."""
|
| 842 |
+
return len(self._specs)
|
| 843 |
+
|
| 844 |
+
def __iter__(self) -> Iterator[Specifier]:
|
| 845 |
+
"""
|
| 846 |
+
Returns an iterator over all the underlying :class:`Specifier` instances
|
| 847 |
+
in this specifier set.
|
| 848 |
+
|
| 849 |
+
>>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
|
| 850 |
+
[<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
|
| 851 |
+
"""
|
| 852 |
+
return iter(self._specs)
|
| 853 |
+
|
| 854 |
+
def __contains__(self, item: UnparsedVersion) -> bool:
|
| 855 |
+
"""Return whether or not the item is contained in this specifier.
|
| 856 |
+
|
| 857 |
+
:param item: The item to check for.
|
| 858 |
+
|
| 859 |
+
This is used for the ``in`` operator and behaves the same as
|
| 860 |
+
:meth:`contains` with no ``prereleases`` argument passed.
|
| 861 |
+
|
| 862 |
+
>>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 863 |
+
True
|
| 864 |
+
>>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 865 |
+
True
|
| 866 |
+
>>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 867 |
+
False
|
| 868 |
+
>>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 869 |
+
False
|
| 870 |
+
>>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
|
| 871 |
+
True
|
| 872 |
+
"""
|
| 873 |
+
return self.contains(item)
|
| 874 |
+
|
| 875 |
+
def contains(
|
| 876 |
+
self,
|
| 877 |
+
item: UnparsedVersion,
|
| 878 |
+
prereleases: bool | None = None,
|
| 879 |
+
installed: bool | None = None,
|
| 880 |
+
) -> bool:
|
| 881 |
+
"""Return whether or not the item is contained in this SpecifierSet.
|
| 882 |
+
|
| 883 |
+
:param item:
|
| 884 |
+
The item to check for, which can be a version string or a
|
| 885 |
+
:class:`Version` instance.
|
| 886 |
+
:param prereleases:
|
| 887 |
+
Whether or not to match prereleases with this SpecifierSet. If set to
|
| 888 |
+
``None`` (the default), it uses :attr:`prereleases` to determine
|
| 889 |
+
whether or not prereleases are allowed.
|
| 890 |
+
|
| 891 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
|
| 892 |
+
True
|
| 893 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
|
| 894 |
+
True
|
| 895 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
|
| 896 |
+
False
|
| 897 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
|
| 898 |
+
False
|
| 899 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
|
| 900 |
+
True
|
| 901 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
|
| 902 |
+
True
|
| 903 |
+
"""
|
| 904 |
+
# Ensure that our item is a Version instance.
|
| 905 |
+
if not isinstance(item, Version):
|
| 906 |
+
item = Version(item)
|
| 907 |
+
|
| 908 |
+
# Determine if we're forcing a prerelease or not, if we're not forcing
|
| 909 |
+
# one for this particular filter call, then we'll use whatever the
|
| 910 |
+
# SpecifierSet thinks for whether or not we should support prereleases.
|
| 911 |
+
if prereleases is None:
|
| 912 |
+
prereleases = self.prereleases
|
| 913 |
+
|
| 914 |
+
# We can determine if we're going to allow pre-releases by looking to
|
| 915 |
+
# see if any of the underlying items supports them. If none of them do
|
| 916 |
+
# and this item is a pre-release then we do not allow it and we can
|
| 917 |
+
# short circuit that here.
|
| 918 |
+
# Note: This means that 1.0.dev1 would not be contained in something
|
| 919 |
+
# like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
|
| 920 |
+
if not prereleases and item.is_prerelease:
|
| 921 |
+
return False
|
| 922 |
+
|
| 923 |
+
if installed and item.is_prerelease:
|
+            item = Version(item.base_version)
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        # will always return True; this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet("").filter(["1.5a1"]))
+        ['1.5a1']
+        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        """
+        # Determine if we're forcing a prerelease or not; if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks about whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one; this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iter(iterable)
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases.
+        else:
+            filtered: list[UnparsedVersionVar] = []
+            found_prereleases: list[UnparsedVersionVar] = []
+
+            for item in iterable:
+                parsed_version = _coerce_version(item)
+
+                # Store any item which is a pre-release for later, unless we've
+                # already found a final version or we are accepting prereleases.
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases.
+            if not filtered and found_prereleases and prereleases is None:
+                return iter(found_prereleases)
+
+            return iter(filtered)
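A quick illustration of the prerelease semantics documented above. This is a minimal sketch, assuming the standalone `packaging` distribution, which exposes the same `SpecifierSet` API as this vendored copy:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

specs = SpecifierSet(">=1.2.3")

# filter() drops the prerelease when final releases also match.
print(list(specs.filter(["1.2", "1.3", "1.5a1"])))         # ['1.3']

# An empty SpecifierSet lets prereleases through only when nothing else matches.
print(list(SpecifierSet("").filter(["1.5a1"])))            # ['1.5a1']

# contains() applies the same prerelease rules to a single version.
print(specs.contains(Version("1.5a1"), prereleases=True))  # True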
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/packaging/utils.py
ADDED
@@ -0,0 +1,174 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import re
+from typing import NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found; users should refer to PEP 427.
+    """
+
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found; users should refer to the packaging user guide.
+    """
+
+
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
+_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
+    # This is taken from PEP 503.
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
+
+
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
+def canonicalize_version(
+    version: Version | str, *, strip_trailing_zero: bool = True
+) -> str:
+    """
+    This is very similar to Version.__str__, but has one subtle difference
+    in the way it handles the release segment.
+    """
+    if isinstance(version, str):
+        try:
+            parsed = Version(version)
+        except InvalidVersion:
+            # Legacy versions cannot be normalized
+            return version
+    else:
+        parsed = version
+
+    parts = []
+
+    # Epoch
+    if parsed.epoch != 0:
+        parts.append(f"{parsed.epoch}!")
+
+    # Release segment
+    release_segment = ".".join(str(x) for x in parsed.release)
+    if strip_trailing_zero:
+        # NB: This strips trailing '.0's to normalize
+        release_segment = re.sub(r"(\.0)+$", "", release_segment)
+    parts.append(release_segment)
+
+    # Pre-release
+    if parsed.pre is not None:
+        parts.append("".join(str(x) for x in parsed.pre))
+
+    # Post-release
+    if parsed.post is not None:
+        parts.append(f".post{parsed.post}")
+
+    # Development release
+    if parsed.dev is not None:
+        parts.append(f".dev{parsed.dev}")
+
+    # Local version segment
+    if parsed.local is not None:
+        parts.append(f"+{parsed.local}")
+
+    return "".join(parts)
+
+
+def parse_wheel_filename(
+    filename: str,
+) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name.
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename}")
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename}"
+        ) from e
+
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in '{filename}'"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
+
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename}"
+        ) from e
+
+    return (name, version)
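A short usage sketch for the helpers above, assuming the standalone `packaging` distribution, which ships the same `utils` module as this vendored copy:

from packaging.utils import (
    canonicalize_name,
    canonicalize_version,
    parse_sdist_filename,
    parse_wheel_filename,
)

# PEP 503 normalization: runs of '-', '_', '.' collapse to '-', lowercased.
print(canonicalize_name("Foo.Bar_baz"))   # 'foo-bar-baz'

# Trailing '.0' release segments are stripped by default.
print(canonicalize_version("1.2.0"))      # '1.2'

# Wheel filenames split into (name, version, build tag, tags).
name, version, build, tags = parse_wheel_filename("pip-24.0-py3-none-any.whl")
print(name, version, build)               # pip 24.0 ()

# Sdist filenames split on the last dash, since PEP 440 versions have none.
print(parse_sdist_filename("pip-24.0.tar.gz"))  # ('pip', <Version('24.0')>)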
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/urllib3/__init__.py
ADDED
@@ -0,0 +1,102 @@
+"""
+Python HTTP library with thread-safe connection pooling, file post support, a user-friendly API, and more
+"""
+from __future__ import absolute_import
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+import warnings
+from logging import NullHandler
+
+from . import exceptions
+from ._version import __version__
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+from .filepost import encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .util.request import make_headers
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host
+
+# === NOTE TO REPACKAGERS AND VENDORS ===
+# Please delete this block; this logic is only
+# for urllib3 being distributed via PyPI.
+# See: https://github.com/urllib3/urllib3/issues/2680
+try:
+    import urllib3_secure_extra  # type: ignore # noqa: F401
+except ImportError:
+    pass
+else:
+    warnings.warn(
+        "'urllib3[secure]' extra is deprecated and will be removed "
+        "in a future release of urllib3 2.x. Read more in this issue: "
+        "https://github.com/urllib3/urllib3/issues/2680",
+        category=DeprecationWarning,
+        stacklevel=2,
+    )
+
+__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+__license__ = "MIT"
+__version__ = __version__
+
+__all__ = (
+    "HTTPConnectionPool",
+    "HTTPSConnectionPool",
+    "PoolManager",
+    "ProxyManager",
+    "HTTPResponse",
+    "Retry",
+    "Timeout",
+    "add_stderr_logger",
+    "connection_from_url",
+    "disable_warnings",
+    "encode_multipart_formdata",
+    "get_host",
+    "make_headers",
+    "proxy_from_url",
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(level=logging.DEBUG):
+    """
+    Helper for quickly adding a StreamHandler to the logger. Useful for
+    debugging.
+
+    Returns the handler after adding it.
+    """
+    # This method needs to be in this __init__.py to get the __name__ correct
+    # even if urllib3 is vendored within another package.
+    logger = logging.getLogger(__name__)
+    handler = logging.StreamHandler()
+    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+    logger.addHandler(handler)
+    logger.setLevel(level)
+    logger.debug("Added a stderr logging handler to logger: %s", __name__)
+    return handler
+
+
+# ... Clean up.
+del NullHandler
+
+
+# All warning filters *must* be appended unless you're really certain that they
+# shouldn't be: otherwise, it's very hard for users to use most Python
+# mechanisms to silence them.
+# SecurityWarnings always go off by default.
+warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
+# SubjectAltNameWarnings should go off once per host.
+warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
+# InsecurePlatformWarnings don't vary between requests, so we keep them at the default.
+warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
+
+
+def disable_warnings(category=exceptions.HTTPWarning):
+    """
+    Helper for quickly disabling all urllib3 warnings.
+    """
+    warnings.simplefilter("ignore", category)
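A minimal sketch of the two module-level helpers defined above, run against the vendored copy. The import path assumes pip is installed; the standalone urllib3 1.26.x package behaves the same way:

import logging

from pip._vendor import urllib3

# Attach a stderr StreamHandler to the vendored urllib3 logger; the helper
# returns the handler so it can be detached later.
handler = urllib3.add_stderr_logger(level=logging.INFO)

# Silence every urllib3 warning; all categories derive from HTTPWarning.
urllib3.disable_warnings()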
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_vendor/urllib3/_version.py
ADDED
@@ -0,0 +1,2 @@
+# This file is protected via CODEOWNERS
+__version__ = "1.26.20"
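For completeness, the vendored version string above can be read back directly (again assuming pip is installed):

from pip._vendor.urllib3 import __version__

print(__version__)  # '1.26.20'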