pull/717/head
jnopareboateng 2 years ago
parent fe8dcd09c8
commit 8f441251f7

.gitignore

@@ -361,4 +361,5 @@ MigrationBackup/
.Rhistory
ML-For-Beginners.Rproj
.venv
.obsidian
.obsidian
.venv/Lib/site-packages/

@@ -1,20 +0,0 @@
Copyright (c) 2008-present The pip developers (see AUTHORS.txt file)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@@ -1,88 +0,0 @@
Metadata-Version: 2.1
Name: pip
Version: 23.0.1
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: distutils-sig@python.org
License: MIT
Project-URL: Documentation, https://pip.pypa.io
Project-URL: Source, https://github.com/pypa/pip
Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.7
License-File: LICENSE.txt
pip - The Python Package Installer
==================================
.. image:: https://img.shields.io/pypi/v/pip.svg
:target: https://pypi.org/project/pip/
.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
:target: https://pip.pypa.io/en/latest
pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
Please take a look at our documentation for how to install and use pip:
* `Installation`_
* `Usage`_
We release updates regularly, with a new version every 3 months. Find more details in our documentation:
* `Release notes`_
* `Release process`_
In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
* `Issue tracking`_
* `Discourse channel`_
* `User IRC`_
If you want to get involved, head over to GitHub to get the source code, look at our development documentation, and feel free to jump on the developer mailing lists and chat rooms:
* `GitHub page`_
* `Development documentation`_
* `Development IRC`_
Code of Conduct
---------------
Everyone interacting in the pip project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _package installer: https://packaging.python.org/guides/tool-recommendations/
.. _Python Package Index: https://pypi.org
.. _Installation: https://pip.pypa.io/en/stable/installation/
.. _Usage: https://pip.pypa.io/en/stable/
.. _Release notes: https://pip.pypa.io/en/stable/news.html
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Development documentation: https://pip.pypa.io/en/latest/development
.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md

File diff suppressed because it is too large

@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.38.4)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -1,4 +0,0 @@
[console_scripts]
pip = pip._internal.cli.main:main
pip3 = pip._internal.cli.main:main
pip3.9 = pip._internal.cli.main:main

@@ -1,6 +1,6 @@
from typing import List, Optional
__version__ = "23.0.1"
__version__ = "23.3"
def main(args: Optional[List[str]] = None) -> int:

@@ -1,6 +1,5 @@
import os
import sys
import warnings
# Remove '' and current working directory from the first entry
# of sys.path, if present to avoid using current directory
@@ -20,12 +19,6 @@ if __package__ == "":
sys.path.insert(0, path)
if __name__ == "__main__":
# Work around the error reported in #9540, pending a proper fix.
# Note: It is essential the warning filter is set *before* importing
# pip, as the deprecation happens at import time, not runtime.
warnings.filterwarnings(
"ignore", category=DeprecationWarning, module=".*packaging\\.version"
)
from pip._internal.cli.main import main as _main
sys.exit(_main())

@@ -1,6 +1,5 @@
from typing import List, Optional
import pip._internal.utils.inject_securetransport # noqa
from pip._internal.utils import _log
# init_logging() must be called before any call to logging.getLogger()

@@ -6,14 +6,13 @@ import json
import logging
import os
from pathlib import Path
from typing import Any, Dict, List, Optional, Set
from typing import Any, Dict, List, Optional
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.direct_url import DirectUrl
from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
@@ -33,25 +32,13 @@ def _hash_dict(d: Dict[str, str]) -> str:
class Cache:
"""An abstract class - provides cache directories for data from links
:param cache_dir: The root of the cache.
:param format_control: An object of FormatControl class to limit
binaries being read from the cache.
:param allowed_formats: which formats of files the cache should store.
('binary' and 'source' are the only allowed values)
"""
def __init__(
self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str]
) -> None:
def __init__(self, cache_dir: str) -> None:
super().__init__()
assert not cache_dir or os.path.isabs(cache_dir)
self.cache_dir = cache_dir or None
self.format_control = format_control
self.allowed_formats = allowed_formats
_valid_formats = {"source", "binary"}
assert self.allowed_formats.union(_valid_formats) == _valid_formats
def _get_cache_path_parts(self, link: Link) -> List[str]:
"""Get parts of part that must be os.path.joined with cache_dir"""
@@ -91,16 +78,10 @@ class Cache:
if can_not_cache:
return []
formats = self.format_control.get_allowed_formats(canonical_package_name)
if not self.allowed_formats.intersection(formats):
return []
candidates = []
path = self.get_path_for_link(link)
if os.path.isdir(path):
for candidate in os.listdir(path):
candidates.append((candidate, path))
return candidates
return [(candidate, path) for candidate in os.listdir(path)]
return []
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached items in for link."""
@@ -121,8 +102,8 @@
class SimpleWheelCache(Cache):
"""A cache of wheels for future installs."""
def __init__(self, cache_dir: str, format_control: FormatControl) -> None:
super().__init__(cache_dir, format_control, {"binary"})
def __init__(self, cache_dir: str) -> None:
super().__init__(cache_dir)
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached wheels for link
@@ -191,13 +172,13 @@ class SimpleWheelCache(Cache):
class EphemWheelCache(SimpleWheelCache):
"""A SimpleWheelCache that creates it's own temporary cache directory"""
def __init__(self, format_control: FormatControl) -> None:
def __init__(self) -> None:
self._temp_dir = TempDirectory(
kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
globally_managed=True,
)
super().__init__(self._temp_dir.path, format_control)
super().__init__(self._temp_dir.path)
class CacheEntry:
@@ -211,7 +192,17 @@ class CacheEntry:
self.origin: Optional[DirectUrl] = None
origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
if origin_direct_url_path.exists():
self.origin = DirectUrl.from_json(origin_direct_url_path.read_text())
try:
self.origin = DirectUrl.from_json(
origin_direct_url_path.read_text(encoding="utf-8")
)
except Exception as e:
logger.warning(
"Ignoring invalid cache entry origin file %s for %s (%s)",
origin_direct_url_path,
link.filename,
e,
)
class WheelCache(Cache):
@@ -221,14 +212,10 @@ class WheelCache(Cache):
when a certain link is not found in the simple wheel cache first.
"""
def __init__(
self, cache_dir: str, format_control: Optional[FormatControl] = None
) -> None:
if format_control is None:
format_control = FormatControl()
super().__init__(cache_dir, format_control, {"binary"})
self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
self._ephem_cache = EphemWheelCache(format_control)
def __init__(self, cache_dir: str) -> None:
super().__init__(cache_dir)
self._wheel_cache = SimpleWheelCache(cache_dir)
self._ephem_cache = EphemWheelCache()
def get_path_for_link(self, link: Link) -> str:
return self._wheel_cache.get_path_for_link(link)
@@ -278,16 +265,26 @@ class WheelCache(Cache):
@staticmethod
def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
if origin_path.is_file():
origin = DirectUrl.from_json(origin_path.read_text())
# TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564
# is merged.
if origin.url != download_info.url:
if origin_path.exists():
try:
origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
except Exception as e:
logger.warning(
"Origin URL %s in cache entry %s does not match download URL %s. "
"This is likely a pip bug or a cache corruption issue.",
origin.url,
cache_dir,
download_info.url,
"Could not read origin file %s in cache entry (%s). "
"Will attempt to overwrite it.",
origin_path,
e,
)
else:
# TODO: use DirectUrl.equivalent when
# https://github.com/pypa/pip/pull/10564 is merged.
if origin.url != download_info.url:
logger.warning(
"Origin URL %s in cache entry %s does not match download URL "
"%s. This is likely a pip bug or a cache corruption issue. "
"Will overwrite it with the new value.",
origin.url,
cache_dir,
download_info.url,
)
origin_path.write_text(download_info.to_json(), encoding="utf-8")

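The cache.py hunks above remove `FormatControl` from the entire cache class hierarchy, so constructing a wheel cache now takes only a directory. A minimal sketch of the before/after construction, assuming pip 23.3's non-public internals and a hypothetical cache path:

```python
from pip._internal.cache import WheelCache
from pip._internal.utils.temp_dir import global_tempdir_manager

# Before (23.0.1): WheelCache(cache_dir, format_control)
# After this change: only the cache directory is needed.
with global_tempdir_manager():  # the ephemeral sub-cache registers a temp dir
    wheel_cache = WheelCache("/tmp/pip-cache")  # hypothetical path
    print(wheel_cache.cache_dir)
```
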
@@ -71,8 +71,9 @@ def autocomplete() -> None:
for opt in subcommand.parser.option_list_all:
if opt.help != optparse.SUPPRESS_HELP:
for opt_str in opt._long_opts + opt._short_opts:
options.append((opt_str, opt.nargs))
options += [
(opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
]
# filter out previously specified options from available options
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]

@@ -122,6 +122,26 @@ class Command(CommandContextMixIn):
user_log_file=options.log,
)
always_enabled_features = set(options.features_enabled) & set(
cmdoptions.ALWAYS_ENABLED_FEATURES
)
if always_enabled_features:
logger.warning(
"The following features are always enabled: %s. ",
", ".join(sorted(always_enabled_features)),
)
# Make sure that the --python argument isn't specified after the
# subcommand. We can tell, because if --python was specified,
# we should only reach this point if we're running in the created
# subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
# variable set.
if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
logger.critical(
"The --python option must be placed before the pip subcommand name"
)
sys.exit(ERROR)
# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.
@@ -161,7 +181,7 @@ class Command(CommandContextMixIn):
assert isinstance(status, int)
return status
except DiagnosticPipError as exc:
logger.error("[present-rich] %s", exc)
logger.error("%s", exc, extra={"rich": True})
logger.debug("Exception information:", exc_info=True)
return ERROR

@@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
)
if check_target:
if dist_restriction_set and not options.target_dir:
if not options.dry_run and dist_restriction_set and not options.target_dir:
raise CommandError(
"Can not use any platform or abi specific options unless "
"installing via '--target'"
"installing via '--target' or using '--dry-run'"
)
@@ -252,6 +252,19 @@ no_input: Callable[..., Option] = partial(
help="Disable prompting for input.",
)
keyring_provider: Callable[..., Option] = partial(
Option,
"--keyring-provider",
dest="keyring_provider",
choices=["auto", "disabled", "import", "subprocess"],
default="auto",
help=(
"Enable the credential lookup via the keyring library if user input is allowed."
" Specify which mechanism to use [disabled, import, subprocess]."
" (default: disabled)"
),
)
proxy: Callable[..., Option] = partial(
Option,
"--proxy",
@@ -657,7 +670,10 @@ def prefer_binary() -> Option:
dest="prefer_binary",
action="store_true",
default=False,
help="Prefer older binary packages over newer source packages.",
help=(
"Prefer binary packages over source packages, even if the "
"source packages are newer."
),
)
@@ -770,10 +786,14 @@ def _handle_no_use_pep517(
"""
raise_option_error(parser, option=option, msg=msg)
# If user doesn't wish to use pep517, we check if setuptools is installed
# If user doesn't wish to use pep517, we check if setuptools and wheel are installed
# and raise error if it is not.
if not importlib.util.find_spec("setuptools"):
msg = "It is not possible to use --no-use-pep517 without setuptools installed."
packages = ("setuptools", "wheel")
if not all(importlib.util.find_spec(package) for package in packages):
msg = (
f"It is not possible to use --no-use-pep517 "
f"without {' and '.join(packages)} installed."
)
raise_option_error(parser, option=option, msg=msg)
# Otherwise, --no-use-pep517 was passed via the command-line.
@@ -806,16 +826,23 @@ def _handle_config_settings(
) -> None:
key, sep, val = value.partition("=")
if sep != "=":
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
dest = getattr(parser.values, option.dest)
if dest is None:
dest = {}
setattr(parser.values, option.dest, dest)
dest[key] = val
if key in dest:
if isinstance(dest[key], list):
dest[key].append(val)
else:
dest[key] = [dest[key], val]
else:
dest[key] = val
config_settings: Callable[..., Option] = partial(
Option,
"-C",
"--config-settings",
dest="config_settings",
type=str,
@@ -827,17 +854,6 @@ config_settings: Callable[..., Option] = partial(
"to pass multiple keys to the backend.",
)
install_options: Callable[..., Option] = partial(
Option,
"--install-option",
dest="install_options",
action="append",
metavar="options",
help="This option is deprecated. Using this option with location-changing "
"options may cause unexpected behavior. "
"Use pip-level options like --user, --prefix, --root, and --target.",
)
build_options: Callable[..., Option] = partial(
Option,
"--build-option",
@@ -905,13 +921,13 @@ def _handle_merge_hash(
algo, digest = value.split(":", 1)
except ValueError:
parser.error(
"Arguments to {} must be a hash name " # noqa
"Arguments to {} must be a hash name "
"followed by a value, like --hash=sha256:"
"abcde...".format(opt_str)
)
if algo not in STRONG_HASHES:
parser.error(
"Allowed hash algorithms for {} are {}.".format( # noqa
"Allowed hash algorithms for {} are {}.".format(
opt_str, ", ".join(STRONG_HASHES)
)
)
@@ -981,6 +997,11 @@ no_python_version_warning: Callable[..., Option] = partial(
)
# Features that are now always on. A warning is printed if they are used.
ALWAYS_ENABLED_FEATURES = [
"no-binary-enable-wheel-cache", # always on since 23.1
]
use_new_feature: Callable[..., Option] = partial(
Option,
"--use-feature",
@@ -991,8 +1012,8 @@ use_new_feature: Callable[..., Option] = partial(
choices=[
"fast-deps",
"truststore",
"no-binary-enable-wheel-cache",
],
]
+ ALWAYS_ENABLED_FEATURES,
help="Enable new functionality, that may be backward incompatible.",
)
@@ -1027,6 +1048,7 @@ general_group: Dict[str, Any] = {
quiet,
log,
no_input,
keyring_provider,
proxy,
retries,
timeout,

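The `_handle_config_settings` hunk above changes repeated `-C`/`--config-settings` keys from last-one-wins to list accumulation. A standalone sketch of that merge rule; the function name and sample flags are illustrative, not pip's API:

```python
from typing import Dict, List, Union

ConfigSettings = Dict[str, Union[str, List[str]]]

def merge_setting(dest: ConfigSettings, key: str, val: str) -> None:
    # A repeated key grows into a list instead of overwriting.
    existing = dest.get(key)
    if existing is None:
        dest[key] = val
    elif isinstance(existing, list):
        existing.append(val)
    else:
        dest[key] = [existing, val]

settings: ConfigSettings = {}
for raw in ("--build-option=-q", "--build-option=-v"):  # e.g. two -C flags
    key, _, val = raw.partition("=")
    merge_setting(settings, key, val)
assert settings == {"--build-option": ["-q", "-v"]}
```
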
@@ -4,6 +4,7 @@ import locale
import logging
import os
import sys
import warnings
from typing import List, Optional
from pip._internal.cli.autocompletion import autocomplete
@@ -46,6 +47,14 @@ def main(args: Optional[List[str]] = None) -> int:
if args is None:
args = sys.argv[1:]
# Suppress the pkg_resources deprecation warning
# Note - we use a module of .*pkg_resources to cover
# the normal case (pip._vendor.pkg_resources) and the
# devendored case (a bare pkg_resources)
warnings.filterwarnings(
action="ignore", category=DeprecationWarning, module=".*pkg_resources"
)
# Configure our deprecation warnings to be sent through loggers
deprecation.install_warning_logger()

@@ -229,7 +229,7 @@ class ConfigOptionParser(CustomOptionParser):
val = strtobool(val)
except ValueError:
self.error(
"{} is not a valid value for {} option, " # noqa
"{} is not a valid value for {} option, "
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
)
@@ -240,7 +240,7 @@ class ConfigOptionParser(CustomOptionParser):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
"{} is not a valid value for {} option, " # noqa
"{} is not a valid value for {} option, "
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)

@@ -58,12 +58,9 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
return None
try:
import truststore
except ImportError:
raise CommandError(
"To use the truststore feature, 'truststore' must be installed into "
"pip's current environment."
)
from pip._vendor import truststore
except ImportError as e:
raise CommandError(f"The truststore feature is unavailable: {e}")
return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
@@ -123,7 +120,7 @@ class SessionCommandMixin(CommandContextMixIn):
ssl_context = None
session = PipSession(
cache=os.path.join(cache_dir, "http") if cache_dir else None,
cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
retries=retries if retries is not None else options.retries,
trusted_hosts=options.trusted_hosts,
index_urls=self._get_index_urls(options),
@@ -151,6 +148,7 @@ class SessionCommandMixin(CommandContextMixIn):
# Determine if we can prompt the user for authentication or not
session.auth.prompting = not options.no_input
session.auth.keyring_provider = options.keyring_provider
return session
@@ -267,7 +265,7 @@ class RequirementCommand(IndexGroupCommand):
if "legacy-resolver" in options.deprecated_features_enabled:
return "legacy"
return "2020-resolver"
return "resolvelib"
@classmethod
def make_requirement_preparer(
@@ -286,9 +284,10 @@ class RequirementCommand(IndexGroupCommand):
"""
temp_build_dir_path = temp_build_dir.path
assert temp_build_dir_path is not None
legacy_resolver = False
resolver_variant = cls.determine_resolver_variant(options)
if resolver_variant == "2020-resolver":
if resolver_variant == "resolvelib":
lazy_wheel = "fast-deps" in options.features_enabled
if lazy_wheel:
logger.warning(
@@ -299,6 +298,7 @@
"production."
)
else:
legacy_resolver = True
lazy_wheel = False
if "fast-deps" in options.features_enabled:
logger.warning(
@@ -319,6 +319,7 @@
use_user_site=use_user_site,
lazy_wheel=lazy_wheel,
verbosity=verbosity,
legacy_resolver=legacy_resolver,
)
@classmethod
@@ -343,13 +344,12 @@
install_req_from_req_string,
isolated=options.isolated_mode,
use_pep517=use_pep517,
config_settings=getattr(options, "config_settings", None),
)
resolver_variant = cls.determine_resolver_variant(options)
# The long import name and duplicated invocation is needed to convince
# Mypy into correctly typechecking. Otherwise it would complain the
# "Resolver" class being redefined.
if resolver_variant == "2020-resolver":
if resolver_variant == "resolvelib":
import pip._internal.resolution.resolvelib.resolver
return pip._internal.resolution.resolvelib.resolver.Resolver(
@@ -410,7 +410,7 @@ class RequirementCommand(IndexGroupCommand):
for req in args:
req_to_add = install_req_from_line(
req,
None,
comes_from=None,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
@@ -438,6 +438,9 @@
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
config_settings=parsed_req.options.get("config_settings")
if parsed_req.options
else None,
)
requirements.append(req_to_add)

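The resolver hunks above are a rename, not a behavior change: the default resolver's sentinel string becomes "resolvelib" (its implementation) instead of "2020-resolver" (its rollout year). A hedged sketch of the resulting check, with the options object reduced to a plain list for illustration:

```python
from typing import List

def determine_resolver_variant(deprecated_features_enabled: List[str]) -> str:
    # "2020-resolver" was the old label for this same default code path.
    if "legacy-resolver" in deprecated_features_enabled:
        return "legacy"
    return "resolvelib"

assert determine_resolver_variant([]) == "resolvelib"
assert determine_resolver_variant(["legacy-resolver"]) == "legacy"
```
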
@@ -3,10 +3,10 @@ import textwrap
from optparse import Values
from typing import Any, List
import pip._internal.utils.filesystem as filesystem
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils import filesystem
from pip._internal.utils.logging import getLogger
logger = getLogger(__name__)
@@ -37,7 +37,6 @@ class CacheCommand(Command):
"""
def add_options(self) -> None:
self.cmd_opts.add_option(
"--format",
action="store",
@@ -94,24 +93,30 @@
num_http_files = len(self._find_http_files(options))
num_packages = len(self._find_wheels(options, "*"))
http_cache_location = self._cache_dir(options, "http")
http_cache_location = self._cache_dir(options, "http-v2")
old_http_cache_location = self._cache_dir(options, "http")
wheels_cache_location = self._cache_dir(options, "wheels")
http_cache_size = filesystem.format_directory_size(http_cache_location)
http_cache_size = filesystem.format_size(
filesystem.directory_size(http_cache_location)
+ filesystem.directory_size(old_http_cache_location)
)
wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
message = (
textwrap.dedent(
"""
Package index page cache location: {http_cache_location}
Package index page cache location (pip v23.3+): {http_cache_location}
Package index page cache location (older pips): {old_http_cache_location}
Package index page cache size: {http_cache_size}
Number of HTTP files: {num_http_files}
Locally built wheels location: {wheels_cache_location}
Locally built wheels size: {wheels_cache_size}
Number of locally built wheels: {package_count}
"""
""" # noqa: E501
)
.format(
http_cache_location=http_cache_location,
old_http_cache_location=old_http_cache_location,
http_cache_size=http_cache_size,
num_http_files=num_http_files,
wheels_cache_location=wheels_cache_location,
@@ -152,14 +157,8 @@
logger.info("\n".join(sorted(results)))
def format_for_abspath(self, files: List[str]) -> None:
if not files:
return
results = []
for filename in files:
results.append(filename)
logger.info("\n".join(sorted(results)))
if files:
logger.info("\n".join(sorted(files)))
def remove_cache_items(self, options: Values, args: List[Any]) -> None:
if len(args) > 1:
@@ -196,8 +195,11 @@
return os.path.join(options.cache_dir, subdir)
def _find_http_files(self, options: Values) -> List[str]:
http_dir = self._cache_dir(options, "http")
return filesystem.find_files(http_dir, "*")
old_http_dir = self._cache_dir(options, "http")
new_http_dir = self._cache_dir(options, "http-v2")
return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
new_http_dir, "*"
)
def _find_wheels(self, options: Values, pattern: str) -> List[str]:
wheel_dir = self._cache_dir(options, "wheels")

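Since the HTTP cache moved from `http` to `http-v2`, `pip cache info` above now sums both directories. A sketch of that computation using pip's internal filesystem helpers; the paths are hypothetical and the helpers are not a public API:

```python
from pip._internal.utils import filesystem

old_http = "/home/user/.cache/pip/http"     # hypothetical: pre-23.3 cache
new_http = "/home/user/.cache/pip/http-v2"  # hypothetical: 23.3+ cache

# Missing directories simply contribute 0 bytes.
total = filesystem.directory_size(old_http) + filesystem.directory_size(new_http)
print(filesystem.format_size(total))
```
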
@@ -7,6 +7,7 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
check_package_set,
create_package_set_from_installed,
warn_legacy_versions_and_specifiers,
)
from pip._internal.utils.misc import write_output
@@ -20,8 +21,8 @@ class CheckCommand(Command):
%prog [options]"""
def run(self, options: Values, args: List[str]) -> int:
package_set, parsing_probs = create_package_set_from_installed()
warn_legacy_versions_and_specifiers(package_set)
missing, conflicting = check_package_set(package_set)
for project_name in missing:

@@ -22,15 +22,19 @@ COMPLETION_SCRIPTS = {
complete -o default -F _pip_completion {prog}
""",
"zsh": """
function _pip_completion {{
local words cword
read -Ac words
read -cn cword
reply=( $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
#compdef -P pip[0-9.]#
__pip() {{
compadd $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$((CURRENT-1)) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
}}
compctl -K _pip_completion {prog}
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
# autoload from fpath, call function directly
__pip "$@"
else
# eval/source/. command, register function for later
compdef __pip -P 'pip[0-9.]#'
fi
""",
"fish": """
function __fish_complete_pip

@@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]:
return dict(line.split("==", 1) for line in lines)
def get_module_from_module_name(module_name: str) -> ModuleType:
def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
# Module name can be uppercase in vendor.txt for some reason...
module_name = module_name.lower().replace("-", "_")
# PATCH: setuptools is actually only pkg_resources.
if module_name == "setuptools":
module_name = "pkg_resources"
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
return getattr(pip._vendor, module_name)
try:
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
return getattr(pip._vendor, module_name)
except ImportError:
# We allow 'truststore' to fail to import due
# to being unavailable on Python 3.9 and earlier.
if module_name == "truststore" and sys.version_info < (3, 10):
return None
raise
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
module = get_module_from_module_name(module_name)
version = getattr(module, "__version__", None)
if not version:
if module and not version:
# Try to find version in debundled module info.
assert module.__file__ is not None
env = get_environment([os.path.dirname(module.__file__)])
@@ -105,7 +112,7 @@ def show_tags(options: Values) -> None:
tag_limit = 10
target_python = make_target_python(options)
tags = target_python.get_tags()
tags = target_python.get_sorted_tags()
# Display the target options that were explicitly provided.
formatted_target = target_python.format_given()
@@ -134,10 +141,7 @@ def show_tags(options: Values) -> None:
def ca_bundle_info(config: Configuration) -> str:
levels = set()
for key, _ in config.items():
levels.add(key.split(".")[0])
levels = {key.split(".", 1)[0] for key, _ in config.items()}
if not levels:
return "Not specified"

@@ -8,10 +8,7 @@ from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.req.req_install import (
LegacySetupPyOptionsCheckMode,
check_legacy_setup_py_options,
)
from pip._internal.req.req_install import check_legacy_setup_py_options
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory
@@ -79,7 +76,6 @@ class DownloadCommand(RequirementCommand):
@with_cleanup
def run(self, options: Values, args: List[str]) -> int:
options.ignore_installed = True
# editable doesn't really make sense for `pip download`, but the bowels
# of the RequirementSet code require that property.
@@ -109,9 +105,7 @@ class DownloadCommand(RequirementCommand):
)
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(
options, reqs, LegacySetupPyOptionsCheckMode.DOWNLOAD
)
check_legacy_setup_py_options(options, reqs)
preparer = self.make_requirement_preparer(
temp_build_dir=directory,
@@ -143,6 +137,10 @@
assert req.name is not None
preparer.save_linked_requirement(req)
downloaded.append(req.name)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
requirement_set.warn_legacy_versions_and_specifiers()
if downloaded:
write_output("Successfully downloaded %s", " ".join(downloaded))

@@ -1,6 +1,6 @@
import sys
from optparse import Values
from typing import List
from typing import AbstractSet, List
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
@@ -8,7 +8,18 @@ from pip._internal.cli.status_codes import SUCCESS
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs
DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"}
def _should_suppress_build_backends() -> bool:
return sys.version_info < (3, 12)
def _dev_pkgs() -> AbstractSet[str]:
pkgs = {"pip"}
if _should_suppress_build_backends():
pkgs |= {"setuptools", "distribute", "wheel"}
return pkgs
class FreezeCommand(Command):
@@ -61,7 +72,7 @@
action="store_true",
help=(
"Do not skip these packages in the output:"
" {}".format(", ".join(DEV_PKGS))
" {}".format(", ".join(_dev_pkgs()))
),
)
self.cmd_opts.add_option(
@@ -77,7 +88,7 @@
def run(self, options: Values, args: List[str]) -> int:
skip = set(stdlib_pkgs)
if not options.freeze_all:
skip.update(DEV_PKGS)
skip.update(_dev_pkgs())
if options.excludes:
skip.update(options.excludes)

@@ -5,9 +5,8 @@ import os
import shutil
import site
from optparse import SUPPRESS_HELP, Values
from typing import Iterable, List, Optional
from typing import List, Optional
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.rich import print_json
from pip._internal.cache import WheelCache
@@ -22,22 +21,15 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, InstallationError
from pip._internal.locations import get_scheme
from pip._internal.metadata import get_environment
from pip._internal.models.format_control import FormatControl
from pip._internal.models.installation_report import InstallationReport
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
from pip._internal.req import install_given_reqs
from pip._internal.req.req_install import (
InstallRequirement,
LegacySetupPyOptionsCheckMode,
check_legacy_setup_py_options,
)
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.deprecation import (
LegacyInstallReasonFailedBdistWheel,
deprecated,
)
from pip._internal.utils.distutils_args import parse_distutils_args
from pip._internal.utils.filesystem import test_writable_dir
from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import (
@@ -52,26 +44,11 @@ from pip._internal.utils.virtualenv import (
running_under_virtualenv,
virtualenv_no_global,
)
from pip._internal.wheel_builder import (
BdistWheelAllowedPredicate,
build,
should_build_for_install_command,
)
from pip._internal.wheel_builder import build, should_build_for_install_command
logger = getLogger(__name__)
def get_check_bdist_wheel_allowed(
format_control: FormatControl,
) -> BdistWheelAllowedPredicate:
def check_binary_allowed(req: InstallRequirement) -> bool:
canonical_name = canonicalize_name(req.name or "")
allowed_formats = format_control.get_allowed_formats(canonical_name)
return "binary" in allowed_formats
return check_binary_allowed
class InstallCommand(RequirementCommand):
"""
Install packages from:
@@ -156,7 +133,12 @@ class InstallCommand(RequirementCommand):
default=None,
help=(
"Installation prefix where lib, bin and other top-level "
"folders are placed"
"folders are placed. Note that the resulting installation may "
"contain scripts and other resources which reference the "
"Python interpreter of pip, and not that of ``--prefix``. "
"See also the ``--python`` option if the intention is to "
"install packages into another (possibly pip-free) "
"environment."
),
)
@@ -218,7 +200,6 @@
self.cmd_opts.add_option(cmdoptions.override_externally_managed())
self.cmd_opts.add_option(cmdoptions.config_settings())
self.cmd_opts.add_option(cmdoptions.install_options())
self.cmd_opts.add_option(cmdoptions.global_options())
self.cmd_opts.add_option(
@@ -309,8 +290,6 @@
cmdoptions.check_dist_restriction(options, check_target=True)
install_options = options.install_options or []
logger.verbose("Using %s", get_pip_version())
options.use_user_site = decide_user_install(
options.use_user_site,
@@ -361,28 +340,9 @@
try:
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(
options, reqs, LegacySetupPyOptionsCheckMode.INSTALL
)
check_legacy_setup_py_options(options, reqs)
if "no-binary-enable-wheel-cache" in options.features_enabled:
# TODO: remove format_control from WheelCache when the deprecation cycle
# is over
wheel_cache = WheelCache(options.cache_dir)
else:
if options.format_control.no_binary:
deprecated(
reason=(
"--no-binary currently disables reading from "
"the cache of locally built wheels. In the future "
"--no-binary will not influence the wheel cache."
),
replacement="to use the --no-cache-dir option",
feature_flag="no-binary-enable-wheel-cache",
issue=11453,
gone_in="23.1",
)
wheel_cache = WheelCache(options.cache_dir, options.format_control)
wheel_cache = WheelCache(options.cache_dir)
# Only when installing is it permitted to use PEP 660.
# In other circumstances (pip wheel, pip download) we generate
@@ -390,8 +350,6 @@
for req in reqs:
req.permit_editable_wheels = True
reject_location_related_install_options(reqs, options.install_options)
preparer = self.make_requirement_preparer(
temp_build_dir=directory,
options=options,
@@ -429,6 +387,9 @@
json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)
if options.dry_run:
# In non dry-run mode, the legacy versions and specifiers check
# will be done as part of conflict detection.
requirement_set.warn_legacy_versions_and_specifiers()
would_install_items = sorted(
(r.metadata["name"], r.metadata["version"])
for r in requirement_set.requirements_to_install
@@ -450,14 +411,10 @@
modifying_pip = pip_req.satisfied_by is None
protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
check_bdist_wheel_allowed = get_check_bdist_wheel_allowed(
finder.format_control
)
reqs_to_build = [
r
for r in requirement_set.requirements.values()
if should_build_for_install_command(r, check_bdist_wheel_allowed)
if should_build_for_install_command(r)
]
_, build_failures = build(
@@ -468,26 +425,14 @@
global_options=global_options,
)
# If we're using PEP 517, we cannot do a legacy setup.py install
# so we fail here.
pep517_build_failure_names: List[str] = [
r.name for r in build_failures if r.use_pep517 # type: ignore
]
if pep517_build_failure_names:
if build_failures:
raise InstallationError(
"Could not build wheels for {}, which is required to "
"install pyproject.toml-based projects".format(
", ".join(pep517_build_failure_names)
", ".join(r.name for r in build_failures) # type: ignore
)
)
# For now, we just warn about failures building legacy
# requirements, as we'll fall through to a setup.py install for
# those.
for r in build_failures:
if not r.use_pep517:
r.legacy_install_reason = LegacyInstallReasonFailedBdistWheel
to_install = resolver.get_installation_order(requirement_set)
# Check for conflicts in the package set we're installing.
@@ -506,7 +451,6 @@
installed = install_given_reqs(
to_install,
install_options,
global_options,
root=options.root_path,
home=target_temp_dir_path,
@@ -557,7 +501,7 @@
show_traceback,
options.use_user_site,
)
logger.error(message, exc_info=show_traceback) # noqa
logger.error(message, exc_info=show_traceback)
return ERROR
@@ -651,7 +595,7 @@
"source of the following dependency conflicts."
)
else:
assert resolver_variant == "2020-resolver"
assert resolver_variant == "resolvelib"
parts.append(
"pip's dependency resolver does not currently take into account "
"all the packages that are installed. This behaviour is the "
@@ -684,7 +628,7 @@
requirement=req,
dep_name=dep_name,
dep_version=dep_version,
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
you=("you" if resolver_variant == "resolvelib" else "you'll"),
)
parts.append(message)
@@ -777,45 +721,6 @@ def decide_user_install(
return True
def reject_location_related_install_options(
requirements: List[InstallRequirement], options: Optional[List[str]]
) -> None:
"""If any location-changing --install-option arguments were passed for
requirements or on the command-line, then show a deprecation warning.
"""
def format_options(option_names: Iterable[str]) -> List[str]:
return ["--{}".format(name.replace("_", "-")) for name in option_names]
offenders = []
for requirement in requirements:
install_options = requirement.install_options
location_options = parse_distutils_args(install_options)
if location_options:
offenders.append(
"{!r} from {}".format(
format_options(location_options.keys()), requirement
)
)
if options:
location_options = parse_distutils_args(options)
if location_options:
offenders.append(
"{!r} from command line".format(format_options(location_options.keys()))
)
if not offenders:
return
raise CommandError(
"Location-changing options found in --install-option: {}."
" This is unsupported, use pip-level options like --user,"
" --prefix, --root, and --target instead.".format("; ".join(offenders))
)
def create_os_error_message(
error: OSError, show_traceback: bool, using_user_site: bool
) -> str:

@@ -103,7 +103,10 @@ class ListCommand(IndexGroupCommand):
dest="list_format",
default="columns",
choices=("columns", "freeze", "json"),
help="Select the output format among: columns (default), freeze, or json",
help=(
"Select the output format among: columns (default), freeze, or json. "
"The 'freeze' format cannot be used with the --outdated option."
),
)
self.cmd_opts.add_option(
@@ -157,7 +160,7 @@ class ListCommand(IndexGroupCommand):
if options.outdated and options.list_format == "freeze":
raise CommandError(
"List format 'freeze' can not be used with the --outdated option."
"List format 'freeze' cannot be used with the --outdated option."
)
cmdoptions.check_list_path_option(options)
@@ -294,7 +297,7 @@ class ListCommand(IndexGroupCommand):
# Create and add a separator.
if len(data) > 0:
pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))
pkg_strings.insert(1, " ".join("-" * x for x in sizes))
for val in pkg_strings:
write_output(val)

@@ -12,10 +12,8 @@ from pip._internal.exceptions import CommandError
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.req.req_install import (
InstallRequirement,
LegacySetupPyOptionsCheckMode,
check_legacy_setup_py_options,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel_builder import build, should_build_for_wheel_command
@@ -44,7 +42,6 @@ class WheelCommand(RequirementCommand):
%prog [options] <archive url/path> ..."""
def add_options(self) -> None:
self.cmd_opts.add_option(
"-w",
"--wheel-dir",
@@ -108,7 +105,6 @@
session = self.get_default_session(options)
finder = self._build_package_finder(options, session)
wheel_cache = WheelCache(options.cache_dir, options.format_control)
options.wheel_dir = normalize_path(options.wheel_dir)
ensure_dir(options.wheel_dir)
@@ -122,28 +118,9 @@
)
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(
options, reqs, LegacySetupPyOptionsCheckMode.WHEEL
)
check_legacy_setup_py_options(options, reqs)
if "no-binary-enable-wheel-cache" in options.features_enabled:
# TODO: remove format_control from WheelCache when the deprecation cycle
# is over
wheel_cache = WheelCache(options.cache_dir)
else:
if options.format_control.no_binary:
deprecated(
reason=(
"--no-binary currently disables reading from "
"the cache of locally built wheels. In the future "
"--no-binary will not influence the wheel cache."
),
replacement="to use the --no-cache-dir option",
feature_flag="no-binary-enable-wheel-cache",
issue=11453,
gone_in="23.1",
)
wheel_cache = WheelCache(options.cache_dir, options.format_control)
wheel_cache = WheelCache(options.cache_dir)
preparer = self.make_requirement_preparer(
temp_build_dir=directory,
@@ -176,6 +153,9 @@
elif should_build_for_wheel_command(req):
reqs_to_build.append(req)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
requirement_set.warn_legacy_versions_and_specifiers()
# build wheels
build_successes, build_failures = build(
reqs_to_build,

@@ -210,8 +210,15 @@ class Configuration:
# Ensure directory exists.
ensure_dir(os.path.dirname(fname))
with open(fname, "w") as f:
parser.write(f)
# Report a clear error if the configuration file cannot be written.
try:
with open(fname, "w") as f:
parser.write(f)
except OSError as error:
raise ConfigurationError(
f"An error occurred while writing to the configuration file "
f"{fname}: {error}"
)
#
# Private routines

@@ -1,4 +1,5 @@
import abc
from typing import Optional
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata.base import BaseDistribution
@@ -19,12 +20,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
- we must be able to create a Distribution object exposing the
above metadata.
- if we need to do work in the build tracker, we must be able to generate a unique
string to identify the requirement in the build tracker.
"""
def __init__(self, req: InstallRequirement) -> None:
super().__init__()
self.req = req
@abc.abstractproperty
def build_tracker_id(self) -> Optional[str]:
"""A string that uniquely identifies this requirement to the build tracker.
If None, then this dist has no work to do in the build tracker, and
``.prepare_distribution_metadata()`` will not be called."""
raise NotImplementedError()
@abc.abstractmethod
def get_metadata_distribution(self) -> BaseDistribution:
raise NotImplementedError()

@@ -1,3 +1,5 @@
from typing import Optional
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
@@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution):
been computed.
"""
@property
def build_tracker_id(self) -> Optional[str]:
return None
def get_metadata_distribution(self) -> BaseDistribution:
assert self.req.satisfied_by is not None, "not actually installed"
return self.req.satisfied_by

@@ -1,5 +1,5 @@
import logging
from typing import Iterable, Set, Tuple
from typing import Iterable, Optional, Set, Tuple
from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
@@ -18,6 +18,12 @@ class SourceDistribution(AbstractDistribution):
generated, either using PEP 517 or using the legacy `setup.py egg_info`.
"""
@property
def build_tracker_id(self) -> Optional[str]:
"""Identify this requirement uniquely by its link."""
assert self.req.link
return self.req.link.url_without_fragment
def get_metadata_distribution(self) -> BaseDistribution:
return self.req.get_dist()

@@ -1,3 +1,5 @@
from typing import Optional
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.distributions.base import AbstractDistribution
@@ -15,6 +17,10 @@ class WheelDistribution(AbstractDistribution):
This does not need any preparation as wheels can be directly unpacked.
"""
@property
def build_tracker_id(self) -> Optional[str]:
return None
def get_metadata_distribution(self) -> BaseDistribution:
"""Loads the metadata from the wheel file into memory and returns a
Distribution that uses it, not relying on the wheel file or

@@ -361,20 +361,6 @@ class MetadataInconsistent(InstallationError):
)
class LegacyInstallFailure(DiagnosticPipError):
"""Error occurred while executing `setup.py install`"""
reference = "legacy-install-failure"
def __init__(self, package_details: str) -> None:
super().__init__(
message="Encountered error while trying to install package.",
context=package_details,
hint_stmt="See above for output from the failure.",
note_stmt="This is an issue with the package mentioned above, not pip.",
)
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
"""A subprocess call failed."""
@@ -558,7 +544,7 @@ class HashMissing(HashError):
# so the output can be directly copied into the requirements file.
package = (
self.req.original_link
if self.req.original_link
if self.req.is_direct
# In case someone feeds something downright stupid
# to InstallRequirement's constructor.
else getattr(self.req, "req", None)

@@ -198,7 +198,7 @@ class LinkEvaluator:
reason = f"wrong project name (not {self.project_name})"
return (LinkType.different_project, reason)
supported_tags = self._target_python.get_tags()
supported_tags = self._target_python.get_unsorted_tags()
if not wheel.supported(supported_tags):
# Include the wheel's tags in the reason string to
# simplify troubleshooting compatibility issues.
@@ -414,7 +414,7 @@ class CandidateEvaluator:
if specifier is None:
specifier = specifiers.SpecifierSet()
supported_tags = target_python.get_tags()
supported_tags = target_python.get_sorted_tags()
return cls(
project_name=project_name,

@@ -171,7 +171,6 @@ def build_source(
expand_dir: bool,
cache_link_parsing: bool,
) -> Tuple[Optional[str], Optional[LinkSource]]:
path: Optional[str] = None
url: Optional[str] = None
if os.path.exists(location): # Is a local path.

@@ -89,7 +89,7 @@ def distutils_scheme(
# finalize_options(); we only want to override here if the user
# has explicitly requested it hence going back to the config
if "install_lib" in d.get_option_dict("install"):
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})
if running_under_virtualenv():
if home:

@@ -9,7 +9,7 @@ from pip._internal.utils.misc import strtobool
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
if TYPE_CHECKING:
from typing import Protocol
from typing import Literal, Protocol
else:
Protocol = object
@@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool:
class Backend(Protocol):
NAME: 'Literal["importlib", "pkg_resources"]'
Distribution: Type[BaseDistribution]
Environment: Type[BaseEnvironment]

@@ -24,7 +24,7 @@ from typing import (
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
from pip._vendor.packaging.utils import NormalizedName
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion, Version
from pip._internal.exceptions import NoneMetadataError
@@ -37,7 +37,6 @@ from pip._internal.models.direct_url import (
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
from pip._internal.utils.misc import is_local, normalize_path
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.urls import url_to_path
from ._json import msg_to_json
@@ -460,6 +459,19 @@ class BaseDistribution(Protocol):
For modern .dist-info distributions, this is the collection of
"Provides-Extra:" entries in distribution metadata.
The return value of this function is not particularly useful other than
display purposes due to backward compatibility issues and the extra
names being poorly normalized prior to PEP 685. If you want to perform
logic operations on extras, use :func:`is_extra_provided` instead.
"""
raise NotImplementedError()
def is_extra_provided(self, extra: str) -> bool:
"""Check whether an extra is provided by this distribution.
This is needed mostly for compatibility issues with pkg_resources not
following the extra normalization rules defined in PEP 685.
"""
raise NotImplementedError()
@@ -537,10 +549,11 @@ class BaseDistribution(Protocol):
"""Get extras from the egg-info directory."""
known_extras = {""}
for entry in self._iter_requires_txt_entries():
if entry.extra in known_extras:
extra = canonicalize_name(entry.extra)
if extra in known_extras:
continue
known_extras.add(entry.extra)
yield entry.extra
known_extras.add(extra)
yield extra
def _iter_egg_info_dependencies(self) -> Iterable[str]:
"""Get distribution dependencies from the egg-info directory.
@@ -556,10 +569,11 @@
all currently available PEP 517 backends, although not standardized.
"""
for entry in self._iter_requires_txt_entries():
if entry.extra and entry.marker:
marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
elif entry.extra:
marker = f'extra == "{safe_extra(entry.extra)}"'
extra = canonicalize_name(entry.extra)
if extra and entry.marker:
marker = f'({entry.marker}) and extra == "{extra}"'
elif extra:
marker = f'extra == "{extra}"'
elif entry.marker:
marker = entry.marker
else:

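The metadata/base.py hunks above exist because of PEP 685: extra names must be compared in normalized form. A standalone illustration of the normalization being applied; the extra name is made up, and `canonicalize_name` is also available as `packaging.utils.canonicalize_name` outside pip:

```python
from pip._vendor.packaging.utils import canonicalize_name

# PEP 503/685 normalization: lower-case, runs of '-', '_', '.' collapse to '-'.
assert canonicalize_name("Testing_Extra") == "testing-extra"

# So the environment marker written from a requires.txt entry is normalized too:
marker = f'extra == "{canonicalize_name("Testing_Extra")}"'
assert marker == 'extra == "testing-extra"'
```
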
@@ -1,4 +1,6 @@
from ._dists import Distribution
from ._envs import Environment
__all__ = ["Distribution", "Environment"]
__all__ = ["NAME", "Distribution", "Environment"]
NAME = "importlib"

@@ -27,7 +27,6 @@ from pip._internal.metadata.base import (
Wheel,
)
from pip._internal.utils.misc import normalize_path
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
@@ -208,12 +207,16 @@ class Distribution(BaseDistribution):
return cast(email.message.Message, self._dist.metadata)
def iter_provided_extras(self) -> Iterable[str]:
return (
safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", [])
return self.metadata.get_all("Provides-Extra", [])
def is_extra_provided(self, extra: str) -> bool:
return any(
canonicalize_name(provided_extra) == canonicalize_name(extra)
for provided_extra in self.metadata.get_all("Provides-Extra", [])
)
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras]
contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
for req_string in self.metadata.get_all("Requires-Dist", []):
req = Requirement(req_string)
if not req.marker:

@@ -151,7 +151,8 @@ def _emit_egg_deprecation(location: Optional[str]) -> None:
deprecated(
reason=f"Loading egg at {location} is deprecated.",
replacement="to use pip for package installation.",
gone_in=None,
gone_in="24.3",
issue=12330,
)
@@ -174,7 +175,7 @@ class Environment(BaseEnvironment):
for location in self._paths:
yield from finder.find(location)
for dist in finder.find_eggs(location):
# _emit_egg_deprecation(dist.location) # TODO: Enable this.
_emit_egg_deprecation(dist.location)
yield dist
# This must go last because that's how pkg_resources tie-breaks.
yield from finder.find_linked(location)

@@ -24,8 +24,12 @@ from .base import (
Wheel,
)
__all__ = ["NAME", "Distribution", "Environment"]
logger = logging.getLogger(__name__)
NAME = "pkg_resources"
class EntryPoint(NamedTuple):
name: str
@@ -212,12 +216,16 @@ class Distribution(BaseDistribution):
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
if extras: # pkg_resources raises on invalid extras, so we sanitize.
extras = frozenset(extras).intersection(self._dist.extras)
extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
extras = extras.intersection(self._dist.extras)
return self._dist.requires(extras)
def iter_provided_extras(self) -> Iterable[str]:
return self._dist.extras
def is_extra_provided(self, extra: str) -> bool:
return pkg_resources.safe_extra(extra) in self._dist.extras
class Environment(BaseEnvironment):
def __init__(self, ws: pkg_resources.WorkingSet) -> None:

@@ -105,22 +105,31 @@ class ArchiveInfo:
hash: Optional[str] = None,
hashes: Optional[Dict[str, str]] = None,
) -> None:
if hash is not None:
# set hashes before hash, since the hash setter will further populate hashes
self.hashes = hashes
self.hash = hash
@property
def hash(self) -> Optional[str]:
return self._hash
@hash.setter
def hash(self, value: Optional[str]) -> None:
if value is not None:
# Auto-populate the hashes key to upgrade to the new format automatically.
# We don't back-populate the legacy hash key.
# We don't back-populate the legacy hash key from hashes.
try:
hash_name, hash_value = hash.split("=", 1)
hash_name, hash_value = value.split("=", 1)
except ValueError:
raise DirectUrlValidationError(
f"invalid archive_info.hash format: {hash!r}"
f"invalid archive_info.hash format: {value!r}"
)
if hashes is None:
hashes = {hash_name: hash_value}
elif hash_name not in hash:
hashes = hashes.copy()
hashes[hash_name] = hash_value
self.hash = hash
self.hashes = hashes
if self.hashes is None:
self.hashes = {hash_name: hash_value}
elif hash_name not in self.hashes:
self.hashes = self.hashes.copy()
self.hashes[hash_name] = hash_value
self._hash = value
@classmethod
def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:

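The ArchiveInfo rework above turns `hash` into a property whose setter back-fills the modern `hashes` mapping (without ever back-populating the legacy field). A quick illustration against pip's non-public internals, with dummy digest values:

```python
from pip._internal.models.direct_url import ArchiveInfo

info = ArchiveInfo(hash="sha256=deadbeef", hashes={"sha512": "abc123"})

# Assigning the legacy field auto-populated the modern mapping:
assert info.hash == "sha256=deadbeef"
assert info.hashes == {"sha512": "abc123", "sha256": "deadbeef"}
```
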
@@ -14,7 +14,7 @@ class InstallationReport:
def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
assert ireq.download_info, f"No download_info for {ireq}"
res = {
# PEP 610 json for the download URL. download_info.archive_info.hash may
# PEP 610 json for the download URL. download_info.archive_info.hashes may
# be absent when the requirement was installed from the wheel cache
# and the cache entry was populated by an older pip version that did not
# record origin.json.
@@ -22,7 +22,10 @@ class InstallationReport:
# is_direct is true if the requirement was a direct URL reference (which
# includes editable requirements), and false if the requirement was
# downloaded from a PEP 503 index or --find-links.
"is_direct": bool(ireq.original_link),
"is_direct": ireq.is_direct,
# is_yanked is true if the requirement was yanked from the index, but
# was still selected by pip to conform to PEP 592.
"is_yanked": ireq.link.is_yanked if ireq.link else False,
# requested is true if the requirement was specified by the user (aka
# top level requirement), and false if it was installed as a dependency of a
# requirement. https://peps.python.org/pep-0376/#requested
@@ -33,7 +36,7 @@
}
if ireq.user_supplied and ireq.extras:
# For top level requirements, the list of requested extras, if any.
res["requested_extras"] = list(sorted(ireq.extras))
res["requested_extras"] = sorted(ireq.extras)
return res
def to_dict(self) -> Dict[str, Any]:

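Putting the fields above together, one entry of the report might look like this sketch (the keys come from the code above; the values are hypothetical):

    entry = {
        "is_direct": False,             # not a direct URL reference
        "is_yanked": False,             # file was not yanked (PEP 592)
        "requested": True,              # user-supplied, top-level requirement
        "requested_extras": ["socks"],  # sorted; only present for top-level reqs
    }
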
@ -55,25 +55,25 @@ class LinkHash:
name: str
value: str
_hash_re = re.compile(
_hash_url_fragment_re = re.compile(
# NB: we do not validate that the second group (.*) is a valid hex
# digest. Instead, we simply keep that string in this class, and then check it
# against Hashes when hash-checking is needed. This is easier to debug than
# proactively discarding an invalid hex digest, as we handle incorrect hashes
# and malformed hashes in the same place.
r"({choices})=(.*)".format(
r"[#&]({choices})=([^&]*)".format(
choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
),
)
def __post_init__(self) -> None:
assert self._hash_re.match(f"{self.name}={self.value}")
assert self.name in _SUPPORTED_HASHES
@classmethod
@functools.lru_cache(maxsize=None)
def split_hash_name_and_value(cls, url: str) -> Optional["LinkHash"]:
def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
"""Search a string for a checksum algorithm name and encoded output value."""
match = cls._hash_re.search(url)
match = cls._hash_url_fragment_re.search(url)
if match is None:
return None
name, value = match.groups()
@ -95,6 +95,28 @@ class LinkHash:
return hashes.is_hash_allowed(self.name, hex_digest=self.value)
@dataclass(frozen=True)
class MetadataFile:
"""Information about a core metadata file associated with a distribution."""
hashes: Optional[Dict[str, str]]
def __post_init__(self) -> None:
if self.hashes is not None:
assert all(name in _SUPPORTED_HASHES for name in self.hashes)
def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
# Remove any unsupported hash types from the mapping. If this leaves no
# supported hashes, return None.
if hashes is None:
return None
hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
if not hashes:
return None
return hashes
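
A quick illustration of how supported_hashes degrades gracefully, assuming a supported set like pip's (sha512 down to md5) and a made-up unsupported name:

    _SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")

    def supported_hashes(hashes):
        # Drop unsupported algorithms; collapse an empty result to None.
        if hashes is None:
            return None
        kept = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
        return kept or None

    print(supported_hashes({"sha256": "abc", "crc32": "def"}))  # {'sha256': 'abc'}
    print(supported_hashes({"crc32": "def"}))                   # None
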
def _clean_url_path_part(part: str) -> str:
"""
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
@ -167,7 +189,7 @@ class Link(KeyBasedCompareMixin):
"comes_from",
"requires_python",
"yanked_reason",
"dist_info_metadata",
"metadata_file_data",
"cache_link_parsing",
"egg_fragment",
]
@ -178,7 +200,7 @@ class Link(KeyBasedCompareMixin):
comes_from: Optional[Union[str, "IndexContent"]] = None,
requires_python: Optional[str] = None,
yanked_reason: Optional[str] = None,
dist_info_metadata: Optional[str] = None,
metadata_file_data: Optional[MetadataFile] = None,
cache_link_parsing: bool = True,
hashes: Optional[Mapping[str, str]] = None,
) -> None:
@ -196,11 +218,10 @@ class Link(KeyBasedCompareMixin):
a simple repository HTML link. If the file has been yanked but
no reason was provided, this should be the empty string. See
PEP 592 for more information and the specification.
:param dist_info_metadata: the metadata attached to the file, or None if no such
metadata is provided. This is the value of the "data-dist-info-metadata"
attribute, if present, in a simple repository HTML link. This may be parsed
into its own `Link` by `self.metadata_link()`. See PEP 658 for more
information and the specification.
:param metadata_file_data: the metadata attached to the file, or None if
no such metadata is provided. This argument, if not None, indicates
that a separate metadata file exists, and also optionally supplies
hashes for that file.
:param cache_link_parsing: A flag that is used elsewhere to determine
whether resources retrieved from this link should be cached. PyPI
URLs should generally have this set to False, for example.
@ -208,6 +229,10 @@ class Link(KeyBasedCompareMixin):
determine the validity of a download.
"""
# The comes_from, requires_python, and metadata_file_data arguments are
# only used by classmethods of this class, and are not used in client
# code directly.
# url can be a UNC windows share
if url.startswith("\\\\"):
url = path_to_url(url)
@ -217,7 +242,7 @@ class Link(KeyBasedCompareMixin):
# trying to set a new value.
self._url = url
link_hash = LinkHash.split_hash_name_and_value(url)
link_hash = LinkHash.find_hash_url_fragment(url)
hashes_from_link = {} if link_hash is None else link_hash.as_dict()
if hashes is None:
self._hashes = hashes_from_link
@ -227,7 +252,7 @@ class Link(KeyBasedCompareMixin):
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
self.yanked_reason = yanked_reason
self.dist_info_metadata = dist_info_metadata
self.metadata_file_data = metadata_file_data
super().__init__(key=url, defining_class=Link)
@ -250,9 +275,25 @@ class Link(KeyBasedCompareMixin):
url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
pyrequire = file_data.get("requires-python")
yanked_reason = file_data.get("yanked")
dist_info_metadata = file_data.get("dist-info-metadata")
hashes = file_data.get("hashes", {})
# PEP 714: Indexes must use the name core-metadata, but
# clients should support the old name as a fallback for compatibility.
metadata_info = file_data.get("core-metadata")
if metadata_info is None:
metadata_info = file_data.get("dist-info-metadata")
# The metadata info value may be a boolean, or a dict of hashes.
if isinstance(metadata_info, dict):
# The file exists, and hashes have been supplied
metadata_file_data = MetadataFile(supported_hashes(metadata_info))
elif metadata_info:
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
else:
# False or not present: the file does not exist
metadata_file_data = None
# Link.yanked_reason expects an empty string instead of a boolean.
if yanked_reason and not isinstance(yanked_reason, str):
yanked_reason = ""
@ -266,7 +307,7 @@ class Link(KeyBasedCompareMixin):
requires_python=pyrequire,
yanked_reason=yanked_reason,
hashes=hashes,
dist_info_metadata=dist_info_metadata,
metadata_file_data=metadata_file_data,
)
@classmethod
@ -286,14 +327,39 @@ class Link(KeyBasedCompareMixin):
url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
pyrequire = anchor_attribs.get("data-requires-python")
yanked_reason = anchor_attribs.get("data-yanked")
dist_info_metadata = anchor_attribs.get("data-dist-info-metadata")
# PEP 714: Indexes must use the name data-core-metadata, but
# clients should support the old name as a fallback for compatibility.
metadata_info = anchor_attribs.get("data-core-metadata")
if metadata_info is None:
metadata_info = anchor_attribs.get("data-dist-info-metadata")
# The metadata info value may be the string "true", or a string of
# the form "hashname=hashval"
if metadata_info == "true":
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
elif metadata_info is None:
# The file does not exist
metadata_file_data = None
else:
# The file exists, and hashes have been supplied
hashname, sep, hashval = metadata_info.partition("=")
if sep == "=":
metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
else:
# Error - data is wrong. Treat as no hashes supplied.
logger.debug(
"Index returned invalid data-dist-info-metadata value: %s",
metadata_info,
)
metadata_file_data = MetadataFile(None)
return cls(
url,
comes_from=page_url,
requires_python=pyrequire,
yanked_reason=yanked_reason,
dist_info_metadata=dist_info_metadata,
metadata_file_data=metadata_file_data,
)
def __str__(self) -> str:
@ -395,22 +461,13 @@ class Link(KeyBasedCompareMixin):
return match.group(1)
def metadata_link(self) -> Optional["Link"]:
"""Implementation of PEP 658 parsing."""
# Note that Link.from_element() parsing the "data-dist-info-metadata" attribute
# from an HTML anchor tag is typically how the Link.dist_info_metadata attribute
# gets set.
if self.dist_info_metadata is None:
"""Return a link to the associated core metadata file (if any)."""
if self.metadata_file_data is None:
return None
metadata_url = f"{self.url_without_fragment}.metadata"
# If data-dist-info-metadata="true" is set, then the metadata file exists,
# but there is no information about its checksum or anything else.
if self.dist_info_metadata != "true":
link_hash = LinkHash.split_hash_name_and_value(self.dist_info_metadata)
else:
link_hash = None
if link_hash is None:
if self.metadata_file_data.hashes is None:
return Link(metadata_url)
return Link(metadata_url, hashes=link_hash.as_dict())
return Link(metadata_url, hashes=self.metadata_file_data.hashes)
def as_hashes(self) -> Hashes:
return Hashes({k: [v] for k, v in self._hashes.items()})

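The two parsers above accept both attribute spellings per PEP 714. A sketch of the HTML-anchor form, with a made-up digest:

    metadata_info = "sha256=0a1b2c"  # value of data-core-metadata (or the
                                     # legacy data-dist-info-metadata)
    if metadata_info == "true":
        hashes = None  # the metadata file exists, but no hashes were given
    else:
        hashname, sep, hashval = metadata_info.partition("=")
        hashes = {hashname: hashval} if sep == "=" else None
    print(hashes)  # {'sha256': '0a1b2c'}
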
@ -79,7 +79,6 @@ class SearchScope:
redacted_index_urls = []
if self.index_urls and self.index_urls != [PyPI.simple_url]:
for url in self.index_urls:
redacted_index_url = redact_auth_from_url(url)
# Parse the URL

@ -1,5 +1,5 @@
import sys
from typing import List, Optional, Tuple
from typing import List, Optional, Set, Tuple
from pip._vendor.packaging.tags import Tag
@ -22,6 +22,7 @@ class TargetPython:
"py_version",
"py_version_info",
"_valid_tags",
"_valid_tags_set",
]
def __init__(
@ -61,8 +62,9 @@ class TargetPython:
self.py_version = py_version
self.py_version_info = py_version_info
# This is used to cache the return value of get_tags().
# This is used to cache the return value of get_(un)sorted_tags.
self._valid_tags: Optional[List[Tag]] = None
self._valid_tags_set: Optional[Set[Tag]] = None
def format_given(self) -> str:
"""
@ -84,7 +86,7 @@ class TargetPython:
f"{key}={value!r}" for key, value in key_values if value is not None
)
def get_tags(self) -> List[Tag]:
def get_sorted_tags(self) -> List[Tag]:
"""
Return the supported PEP 425 tags to check wheel candidates against.
@ -108,3 +110,13 @@ class TargetPython:
self._valid_tags = tags
return self._valid_tags
def get_unsorted_tags(self) -> Set[Tag]:
"""Exactly the same as get_sorted_tags, but returns a set.
This is important for performance.
"""
if self._valid_tags_set is None:
self._valid_tags_set = set(self.get_sorted_tags())
return self._valid_tags_set

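The set-returning accessor above exists because candidate filtering performs many tag membership tests; a set makes each test O(1) instead of a scan of the sorted list. A toy illustration (the tag strings are stand-ins):

    sorted_tags = ["cp311-cp311-manylinux_2_17_x86_64", "py3-none-any"]
    tag_set = set(sorted_tags)  # cached once, like _valid_tags_set above
    print("py3-none-any" in tag_set)  # True, in constant time
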
@ -3,12 +3,17 @@
Contains interface (MultiDomainBasicAuth) and associated glue code for
providing credentials in the context of network requests.
"""
import logging
import os
import shutil
import subprocess
import sysconfig
import typing
import urllib.parse
from abc import ABC, abstractmethod
from functools import lru_cache
from os.path import commonprefix
from pathlib import Path
from typing import Any, Dict, List, NamedTuple, Optional, Tuple
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
@ -39,6 +44,8 @@ class Credentials(NamedTuple):
class KeyRingBaseProvider(ABC):
"""Keyring base provider interface"""
has_keyring: bool
@abstractmethod
def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
...
@ -51,6 +58,8 @@ class KeyRingBaseProvider(ABC):
class KeyRingNullProvider(KeyRingBaseProvider):
"""Keyring null provider"""
has_keyring = False
def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
return None
@ -61,6 +70,8 @@ class KeyRingNullProvider(KeyRingBaseProvider):
class KeyRingPythonProvider(KeyRingBaseProvider):
"""Keyring interface which uses locally imported `keyring`"""
has_keyring = True
def __init__(self) -> None:
import keyring
@ -97,6 +108,8 @@ class KeyRingCliProvider(KeyRingBaseProvider):
PATH.
"""
has_keyring = True
def __init__(self, cmd: str) -> None:
self.keyring = cmd
@ -123,7 +136,7 @@ class KeyRingCliProvider(KeyRingBaseProvider):
res = subprocess.run(
cmd,
stdin=subprocess.DEVNULL,
capture_output=True,
stdout=subprocess.PIPE,
env=env,
)
if res.returncode:
@ -134,66 +147,89 @@ class KeyRingCliProvider(KeyRingBaseProvider):
"""Mirror the implementation of keyring.set_password using cli"""
if self.keyring is None:
return None
cmd = [self.keyring, "set", service_name, username]
input_ = (password + os.linesep).encode("utf-8")
env = os.environ.copy()
env["PYTHONIOENCODING"] = "utf-8"
res = subprocess.run(cmd, input=input_, env=env)
res.check_returncode()
subprocess.run(
[self.keyring, "set", service_name, username],
input=f"{password}{os.linesep}".encode("utf-8"),
env=env,
check=True,
)
return None
def get_keyring_provider() -> KeyRingBaseProvider:
@lru_cache(maxsize=None)
def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
logger.verbose("Keyring provider requested: %s", provider)
# keyring has previously failed and been disabled
if not KEYRING_DISABLED:
# Default to trying to use Python provider
if KEYRING_DISABLED:
provider = "disabled"
if provider in ["import", "auto"]:
try:
return KeyRingPythonProvider()
impl = KeyRingPythonProvider()
logger.verbose("Keyring provider set: import")
return impl
except ImportError:
pass
except Exception as exc:
# In the event of an unexpected exception
# we should warn the user
logger.warning(
"Installed copy of keyring fails with exception %s, "
"trying to find a keyring executable as a fallback",
str(exc),
)
# Fallback to Cli Provider if `keyring` isn't installed
msg = "Installed copy of keyring fails with exception %s"
if provider == "auto":
msg = msg + ", trying to find a keyring executable as a fallback"
logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
if provider in ["subprocess", "auto"]:
cli = shutil.which("keyring")
if cli and cli.startswith(sysconfig.get_path("scripts")):
# all code within this function is stolen from shutil.which implementation
@typing.no_type_check
def PATH_as_shutil_which_determines_it() -> str:
path = os.environ.get("PATH", None)
if path is None:
try:
path = os.confstr("CS_PATH")
except (AttributeError, ValueError):
# os.confstr() or CS_PATH is not available
path = os.defpath
# bpo-35755: Don't use os.defpath if the PATH environment variable is
# set to an empty string
return path
scripts = Path(sysconfig.get_path("scripts"))
paths = []
for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
p = Path(path)
try:
if not p.samefile(scripts):
paths.append(path)
except FileNotFoundError:
pass
path = os.pathsep.join(paths)
cli = shutil.which("keyring", path=path)
if cli:
logger.verbose("Keyring provider set: subprocess with executable %s", cli)
return KeyRingCliProvider(cli)
logger.verbose("Keyring provider set: disabled")
return KeyRingNullProvider()
def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]:
"""Return the tuple auth for a given url from keyring."""
# Do nothing if no url was provided
if not url:
return None
keyring = get_keyring_provider()
try:
return keyring.get_auth_info(url, username)
except Exception as exc:
logger.warning(
"Keyring is skipped due to an exception: %s",
str(exc),
)
global KEYRING_DISABLED
KEYRING_DISABLED = True
return None
class MultiDomainBasicAuth(AuthBase):
def __init__(
self, prompting: bool = True, index_urls: Optional[List[str]] = None
self,
prompting: bool = True,
index_urls: Optional[List[str]] = None,
keyring_provider: str = "auto",
) -> None:
self.prompting = prompting
self.index_urls = index_urls
self.keyring_provider = keyring_provider # type: ignore[assignment]
self.passwords: Dict[str, AuthInfo] = {}
# When the user is prompted to enter credentials and keyring is
# available, we will offer to save them. If the user accepts,
@ -202,6 +238,47 @@ class MultiDomainBasicAuth(AuthBase):
# ``save_credentials`` to save these.
self._credentials_to_save: Optional[Credentials] = None
@property
def keyring_provider(self) -> KeyRingBaseProvider:
return get_keyring_provider(self._keyring_provider)
@keyring_provider.setter
def keyring_provider(self, provider: str) -> None:
# The free function get_keyring_provider has been decorated with
# functools.cache. If an exception occurs in get_keyring_auth that
# cache will be cleared and keyring disabled; take that into account
# if you want to remove this indirection.
self._keyring_provider = provider
@property
def use_keyring(self) -> bool:
# We won't use keyring when --no-input is passed unless
# a specific provider is requested because it might require
# user interaction
return self.prompting or self._keyring_provider not in ["auto", "disabled"]
def _get_keyring_auth(
self,
url: Optional[str],
username: Optional[str],
) -> Optional[AuthInfo]:
"""Return the tuple auth for a given url from keyring."""
# Do nothing if no url was provided
if not url:
return None
try:
return self.keyring_provider.get_auth_info(url, username)
except Exception as exc:
logger.warning(
"Keyring is skipped due to an exception: %s",
str(exc),
)
global KEYRING_DISABLED
KEYRING_DISABLED = True
get_keyring_provider.cache_clear()
return None
def _get_index_url(self, url: str) -> Optional[str]:
"""Return the original index URL matching the requested URL.
@ -218,15 +295,42 @@ class MultiDomainBasicAuth(AuthBase):
if not url or not self.index_urls:
return None
for u in self.index_urls:
prefix = remove_auth_from_url(u).rstrip("/") + "/"
if url.startswith(prefix):
return u
return None
url = remove_auth_from_url(url).rstrip("/") + "/"
parsed_url = urllib.parse.urlsplit(url)
candidates = []
for index in self.index_urls:
index = index.rstrip("/") + "/"
parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
if parsed_url == parsed_index:
return index
if parsed_url.netloc != parsed_index.netloc:
continue
candidate = urllib.parse.urlsplit(index)
candidates.append(candidate)
if not candidates:
return None
candidates.sort(
reverse=True,
key=lambda candidate: commonprefix(
[
parsed_url.path,
candidate.path,
]
).rfind("/"),
)
return urllib.parse.urlunsplit(candidates[0])
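
A sketch of the "most specific index wins" ordering above: same-netloc candidates are ranked by how much URL path they share with the request, measured up to the last "/" in the common prefix (the paths are hypothetical):

    from os.path import commonprefix

    url_path = "/simple/team-a/pkg/"
    index_paths = ["/simple/", "/simple/team-a/"]
    best = max(index_paths,
               key=lambda p: commonprefix([url_path, p]).rfind("/"))
    print(best)  # /simple/team-a/
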
def _get_new_credentials(
self,
original_url: str,
*,
allow_netrc: bool = True,
allow_keyring: bool = False,
) -> AuthInfo:
@ -270,8 +374,8 @@ class MultiDomainBasicAuth(AuthBase):
# The index url is more specific than the netloc, so try it first
# fmt: off
kr_auth = (
get_keyring_auth(index_url, username) or
get_keyring_auth(netloc, username)
self._get_keyring_auth(index_url, username) or
self._get_keyring_auth(netloc, username)
)
# fmt: on
if kr_auth:
@ -348,18 +452,23 @@ class MultiDomainBasicAuth(AuthBase):
def _prompt_for_password(
self, netloc: str
) -> Tuple[Optional[str], Optional[str], bool]:
username = ask_input(f"User for {netloc}: ")
username = ask_input(f"User for {netloc}: ") if self.prompting else None
if not username:
return None, None, False
auth = get_keyring_auth(netloc, username)
if auth and auth[0] is not None and auth[1] is not None:
return auth[0], auth[1], False
if self.use_keyring:
auth = self._get_keyring_auth(netloc, username)
if auth and auth[0] is not None and auth[1] is not None:
return auth[0], auth[1], False
password = ask_password("Password: ")
return username, password, True
# Factored out to allow for easy patching in tests
def _should_save_password_to_keyring(self) -> bool:
if get_keyring_provider() is None:
if (
not self.prompting
or not self.use_keyring
or not self.keyring_provider.has_keyring
):
return False
return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
@ -369,19 +478,22 @@ class MultiDomainBasicAuth(AuthBase):
if resp.status_code != 401:
return resp
username, password = None, None
# Query the keyring for credentials:
if self.use_keyring:
username, password = self._get_new_credentials(
resp.url,
allow_netrc=False,
allow_keyring=True,
)
# We are not able to prompt the user so simply return the response
if not self.prompting:
if not self.prompting and not username and not password:
return resp
parsed = urllib.parse.urlparse(resp.url)
# Query the keyring for credentials:
username, password = self._get_new_credentials(
resp.url,
allow_netrc=False,
allow_keyring=True,
)
# Prompt the user for a new username and password
save = False
if not username and not password:
@ -402,7 +514,9 @@ class MultiDomainBasicAuth(AuthBase):
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
# The result of the assignment isn't used; it's just needed to consume
# the content.
_ = resp.content
resp.raw.release_conn()
# Add our new username and password to the request
@ -431,9 +545,8 @@ class MultiDomainBasicAuth(AuthBase):
def save_credentials(self, resp: Response, **kwargs: Any) -> None:
"""Response callback to save credentials on success."""
keyring = get_keyring_provider()
assert not isinstance(
keyring, KeyRingNullProvider
assert (
self.keyring_provider.has_keyring
), "should never reach here without keyring"
creds = self._credentials_to_save
@ -441,6 +554,8 @@ class MultiDomainBasicAuth(AuthBase):
if creds and resp.status_code < 400:
try:
logger.info("Saving credentials to keyring")
keyring.save_auth_info(creds.url, creds.username, creds.password)
self.keyring_provider.save_auth_info(
creds.url, creds.username, creds.password
)
except Exception:
logger.exception("Failed to save credentials")

@ -3,10 +3,11 @@
import os
from contextlib import contextmanager
from typing import Generator, Optional
from datetime import datetime
from typing import BinaryIO, Generator, Optional, Union
from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
from pip._vendor.requests.models import Response
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
@ -28,7 +29,7 @@ def suppressed_cache_errors() -> Generator[None, None, None]:
pass
class SafeFileCache(BaseCache):
class SafeFileCache(SeparateBodyBaseCache):
"""
A file based cache which is safe to use even when the target directory may
not be accessible or writable.
@ -43,7 +44,7 @@ class SafeFileCache(BaseCache):
# From cachecontrol.caches.file_cache.FileCache._fn, brought into our
# class for backwards-compatibility and to avoid using a non-public
# method.
hashed = FileCache.encode(name)
hashed = SeparateBodyFileCache.encode(name)
parts = list(hashed[:5]) + [hashed]
return os.path.join(self.directory, *parts)
@ -53,17 +54,33 @@ class SafeFileCache(BaseCache):
with open(path, "rb") as f:
return f.read()
def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
path = self._get_cache_path(key)
def _write(self, path: str, data: bytes) -> None:
with suppressed_cache_errors():
ensure_dir(os.path.dirname(path))
with adjacent_tmp_file(path) as f:
f.write(value)
f.write(data)
replace(f.name, path)
def set(
self, key: str, value: bytes, expires: Union[int, datetime, None] = None
) -> None:
path = self._get_cache_path(key)
self._write(path, value)
def delete(self, key: str) -> None:
path = self._get_cache_path(key)
with suppressed_cache_errors():
os.remove(path)
with suppressed_cache_errors():
os.remove(path + ".body")
def get_body(self, key: str) -> Optional[BinaryIO]:
path = self._get_cache_path(key) + ".body"
with suppressed_cache_errors():
return open(path, "rb")
def set_body(self, key: str, body: bytes) -> None:
path = self._get_cache_path(key) + ".body"
self._write(path, body)

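With cachecontrol's separate-body protocol, metadata and the response body live in two files; the methods above keep them adjacent. A sketch of the naming scheme (the path is hypothetical):

    meta_path = "/tmp/http-cache/a/b/c/d/e/abcdef0123"  # metadata entry
    body_path = meta_path + ".body"                     # body stored beside it
    # delete() above removes both paths, each inside suppressed_cache_errors().
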
@ -6,7 +6,7 @@ from bisect import bisect_left, bisect_right
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, Dict, Generator, List, Optional, Tuple
from zipfile import BadZipfile, ZipFile
from zipfile import BadZipFile, ZipFile
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
@ -160,7 +160,7 @@ class LazyZipOverHTTP:
# For read-only ZIP files, ZipFile only needs
# methods read, seek, seekable and tell.
ZipFile(self) # type: ignore
except BadZipfile:
except BadZipFile:
pass
else:
break

@ -316,7 +316,6 @@ class InsecureCacheControlAdapter(CacheControlAdapter):
class PipSession(requests.Session):
timeout: Optional[int] = None
def __init__(
@ -420,15 +419,17 @@ class PipSession(requests.Session):
msg += f" (from {source})"
logger.info(msg)
host_port = parse_netloc(host)
if host_port not in self.pip_trusted_origins:
self.pip_trusted_origins.append(host_port)
parsed_host, parsed_port = parse_netloc(host)
if parsed_host is None:
raise ValueError(f"Trusted host URL must include a host part: {host!r}")
if (parsed_host, parsed_port) not in self.pip_trusted_origins:
self.pip_trusted_origins.append((parsed_host, parsed_port))
self.mount(
build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
)
self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
if not host_port[1]:
if not parsed_port:
self.mount(
build_url_from_netloc(host, scheme="http") + ":",
self._trusted_host_adapter,

@ -51,10 +51,22 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]:
yield tracker
class TrackerId(str):
"""Uniquely identifying string provided to the build tracker."""
class BuildTracker:
"""Ensure that an sdist cannot request itself as a setup requirement.
When an sdist is prepared, it identifies its setup requirements in the
context of ``BuildTracker.track()``. If a requirement shows up recursively, this
raises an exception.
This stops fork bombs embedded in malicious packages."""
def __init__(self, root: str) -> None:
self._root = root
self._entries: Set[InstallRequirement] = set()
self._entries: Dict[TrackerId, InstallRequirement] = {}
logger.debug("Created build tracker: %s", self._root)
def __enter__(self) -> "BuildTracker":
@ -69,16 +81,15 @@ class BuildTracker:
) -> None:
self.cleanup()
def _entry_path(self, link: Link) -> str:
hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
def _entry_path(self, key: TrackerId) -> str:
hashed = hashlib.sha224(key.encode()).hexdigest()
return os.path.join(self._root, hashed)
def add(self, req: InstallRequirement) -> None:
def add(self, req: InstallRequirement, key: TrackerId) -> None:
"""Add an InstallRequirement to build tracking."""
assert req.link
# Get the file to write information about this requirement.
entry_path = self._entry_path(req.link)
entry_path = self._entry_path(key)
# Try reading from the file. If it exists and can be read from, a build
# is already in progress, so a LookupError is raised.
@ -92,33 +103,37 @@ class BuildTracker:
raise LookupError(message)
# If we're here, req should really not be building already.
assert req not in self._entries
assert key not in self._entries
# Start tracking this requirement.
with open(entry_path, "w", encoding="utf-8") as fp:
fp.write(str(req))
self._entries.add(req)
self._entries[key] = req
logger.debug("Added %s to build tracker %r", req, self._root)
def remove(self, req: InstallRequirement) -> None:
def remove(self, req: InstallRequirement, key: TrackerId) -> None:
"""Remove an InstallRequirement from build tracking."""
assert req.link
# Delete the created file and the corresponding entries.
os.unlink(self._entry_path(req.link))
self._entries.remove(req)
# Delete the created file and the corresponding entry.
os.unlink(self._entry_path(key))
del self._entries[key]
logger.debug("Removed %s from build tracker %r", req, self._root)
def cleanup(self) -> None:
for req in set(self._entries):
self.remove(req)
for key, req in list(self._entries.items()):
self.remove(req, key)
logger.debug("Removed build tracker: %r", self._root)
@contextlib.contextmanager
def track(self, req: InstallRequirement) -> Generator[None, None, None]:
self.add(req)
def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
"""Ensure that `key` cannot install itself as a setup requirement.
:raises LookupError: If `key` was already provided in a parent invocation of
the context introduced by this method."""
tracker_id = TrackerId(key)
self.add(req, tracker_id)
yield
self.remove(req)
self.remove(req, tracker_id)

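Entries are now keyed by an explicit TrackerId rather than the requirement's link, but the on-disk entry file is still derived the same way, by hashing the key. A sketch with a hypothetical key:

    import hashlib

    key = "myproject-1.0"  # hypothetical TrackerId
    entry_name = hashlib.sha224(key.encode()).hexdigest()
    print(entry_name)  # stable, filesystem-safe name for this key's entry
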
@ -5,12 +5,15 @@ import logging
from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import LegacySpecifier
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion
from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.base import DistributionVersion
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.deprecation import deprecated
logger = logging.getLogger(__name__)
@ -57,6 +60,8 @@ def check_package_set(
package name and returns a boolean.
"""
warn_legacy_versions_and_specifiers(package_set)
missing = {}
conflicting = {}
@ -147,3 +152,36 @@ def _create_whitelist(
break
return packages_affected
def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
for project_name, package_details in package_set.items():
if isinstance(package_details.version, LegacyVersion):
deprecated(
reason=(
f"{project_name} {package_details.version} "
f"has a non-standard version number."
),
replacement=(
f"to upgrade to a newer version of {project_name} "
f"or contact the author to suggest that they "
f"release a version with a conforming version number"
),
issue=12063,
gone_in="24.0",
)
for dep in package_details.dependencies:
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
deprecated(
reason=(
f"{project_name} {package_details.version} "
f"has a non-standard dependency specifier {dep}."
),
replacement=(
f"to upgrade to a newer version of {project_name} "
f"or contact the author to suggest that they "
f"release a version with a conforming dependency specifiers"
),
issue=12063,
gone_in="24.0",
)

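The deprecation above targets versions and specifiers that do not parse under PEP 440. A quick way to see what "non-standard" means, using the standalone packaging library (which rejects such versions outright):

    from packaging.version import InvalidVersion, Version

    try:
        Version("2004d")  # a date-plus-letter version; not PEP 440-compliant
    except InvalidVersion as exc:
        print(exc)  # Invalid version: '2004d'
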
@ -145,9 +145,10 @@ def freeze(
def _format_as_name_version(dist: BaseDistribution) -> str:
if isinstance(dist.version, Version):
return f"{dist.raw_name}=={dist.version}"
return f"{dist.raw_name}==={dist.version}"
dist_version = dist.version
if isinstance(dist_version, Version):
return f"{dist.raw_name}=={dist_version}"
return f"{dist.raw_name}==={dist_version}"
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:

@ -1,7 +1,7 @@
"""Legacy editable installation process, i.e. `setup.py develop`.
"""
import logging
from typing import List, Optional, Sequence
from typing import Optional, Sequence
from pip._internal.build_env import BuildEnvironment
from pip._internal.utils.logging import indent_log
@ -12,7 +12,7 @@ logger = logging.getLogger(__name__)
def install_editable(
install_options: List[str],
*,
global_options: Sequence[str],
prefix: Optional[str],
home: Optional[str],
@ -31,7 +31,6 @@ def install_editable(
args = make_setuptools_develop_args(
setup_py_path,
global_options=global_options,
install_options=install_options,
no_user_config=isolated,
prefix=prefix,
home=home,

@ -1,120 +0,0 @@
"""Legacy installation process, i.e. `setup.py install`.
"""
import logging
import os
from typing import List, Optional, Sequence
from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import InstallationError, LegacyInstallFailure
from pip._internal.locations.base import change_root
from pip._internal.models.scheme import Scheme
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.setuptools_build import make_setuptools_install_args
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)
def write_installed_files_from_setuptools_record(
record_lines: List[str],
root: Optional[str],
req_description: str,
) -> None:
def prepend_root(path: str) -> str:
if root is None or not os.path.isabs(path):
return path
else:
return change_root(root, path)
for line in record_lines:
directory = os.path.dirname(line)
if directory.endswith(".egg-info"):
egg_info_dir = prepend_root(directory)
break
else:
message = (
"{} did not indicate that it installed an "
".egg-info directory. Only setup.py projects "
"generating .egg-info directories are supported."
).format(req_description)
raise InstallationError(message)
new_lines = []
for line in record_lines:
filename = line.strip()
if os.path.isdir(filename):
filename += os.path.sep
new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir))
new_lines.sort()
ensure_dir(egg_info_dir)
inst_files_path = os.path.join(egg_info_dir, "installed-files.txt")
with open(inst_files_path, "w") as f:
f.write("\n".join(new_lines) + "\n")
def install(
install_options: List[str],
global_options: Sequence[str],
root: Optional[str],
home: Optional[str],
prefix: Optional[str],
use_user_site: bool,
pycompile: bool,
scheme: Scheme,
setup_py_path: str,
isolated: bool,
req_name: str,
build_env: BuildEnvironment,
unpacked_source_directory: str,
req_description: str,
) -> bool:
header_dir = scheme.headers
with TempDirectory(kind="record") as temp_dir:
try:
record_filename = os.path.join(temp_dir.path, "install-record.txt")
install_args = make_setuptools_install_args(
setup_py_path,
global_options=global_options,
install_options=install_options,
record_filename=record_filename,
root=root,
prefix=prefix,
header_dir=header_dir,
home=home,
use_user_site=use_user_site,
no_user_config=isolated,
pycompile=pycompile,
)
runner = runner_with_spinner_message(
f"Running setup.py install for {req_name}"
)
with build_env:
runner(
cmd=install_args,
cwd=unpacked_source_directory,
)
if not os.path.exists(record_filename):
logger.debug("Record file %s not found", record_filename)
# Signal to the caller that we didn't install the new package
return False
except Exception as e:
# Signal to the caller that we didn't install the new package
raise LegacyInstallFailure(package_details=req_name) from e
# At this point, we have successfully installed the requirement.
# We intentionally do not use any encoding to read the file because
# setuptools writes the file using distutils.file_util.write_file,
# which does not specify an encoding.
with open(record_filename) as f:
record_lines = f.read().splitlines()
write_installed_files_from_setuptools_record(record_lines, root, req_description)
return True

@ -143,16 +143,18 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
# We don't want to warn for directories that are on PATH.
not_warn_dirs = [
os.path.normcase(i).rstrip(os.sep)
os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
for i in os.environ.get("PATH", "").split(os.pathsep)
]
# If an executable sits with sys.executable, we don't warn for it.
# This covers the case of venv invocations without activating the venv.
not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
not_warn_dirs.append(
os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
)
warn_for: Dict[str, Set[str]] = {
parent_dir: scripts
for parent_dir, scripts in grouped_by_dir.items()
if os.path.normcase(parent_dir) not in not_warn_dirs
if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
}
if not warn_for:
return None
@ -265,9 +267,9 @@ def get_csv_rows_for_installed(
path = _fs_to_record_path(f, lib_dir)
digest, length = rehash(f)
installed_rows.append((path, digest, length))
for installed_record_path in installed.values():
installed_rows.append((installed_record_path, "", ""))
return installed_rows
return installed_rows + [
(installed_record_path, "", "") for installed_record_path in installed.values()
]
def get_console_script_specs(console: Dict[str, str]) -> List[str]:

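The added normpath calls above make the PATH comparison robust to trailing separators and "." segments that normcase alone leaves in place, e.g.:

    import os

    raw = "/usr/local/bin/"  # a PATH entry with a trailing slash
    print(os.path.normcase(os.path.normpath(raw)))  # /usr/local/bin
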
@ -4,10 +4,10 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import logging
import mimetypes
import os
import shutil
from pathlib import Path
from typing import Dict, Iterable, List, Optional
from pip._vendor.packaging.utils import canonicalize_name
@ -21,7 +21,6 @@ from pip._internal.exceptions import (
InstallationError,
MetadataInconsistent,
NetworkConnectionError,
PreviousBuildDirError,
VcsHashUnsupported,
)
from pip._internal.index.package_finder import PackageFinder
@ -37,6 +36,7 @@ from pip._internal.network.lazy_wheel import (
from pip._internal.network.session import PipSession
from pip._internal.operations.build.build_tracker import BuildTracker
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils._log import getLogger
from pip._internal.utils.direct_url_helpers import (
direct_url_for_editable,
direct_url_from_link,
@ -47,13 +47,13 @@ from pip._internal.utils.misc import (
display_path,
hash_file,
hide_url,
is_installable_dir,
redact_auth_from_requirement,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.unpacking import unpack_file
from pip._internal.vcs import vcs
logger = logging.getLogger(__name__)
logger = getLogger(__name__)
def _get_prepared_distribution(
@ -65,10 +65,12 @@ def _get_prepared_distribution(
) -> BaseDistribution:
"""Prepare a distribution for installation."""
abstract_dist = make_distribution_for_install_requirement(req)
with build_tracker.track(req):
abstract_dist.prepare_distribution_metadata(
finder, build_isolation, check_build_deps
)
tracker_id = abstract_dist.build_tracker_id
if tracker_id is not None:
with build_tracker.track(req, tracker_id):
abstract_dist.prepare_distribution_metadata(
finder, build_isolation, check_build_deps
)
return abstract_dist.get_metadata_distribution()
@ -179,7 +181,10 @@ def unpack_url(
def _check_download_dir(
link: Link, download_dir: str, hashes: Optional[Hashes]
link: Link,
download_dir: str,
hashes: Optional[Hashes],
warn_on_hash_mismatch: bool = True,
) -> Optional[str]:
"""Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
@ -195,10 +200,11 @@ def _check_download_dir(
try:
hashes.check_against_path(download_path)
except HashMismatch:
logger.warning(
"Previously-downloaded file %s has bad hash. Re-downloading.",
download_path,
)
if warn_on_hash_mismatch:
logger.warning(
"Previously-downloaded file %s has bad hash. Re-downloading.",
download_path,
)
os.unlink(download_path)
return None
return download_path
@ -222,6 +228,7 @@ class RequirementPreparer:
use_user_site: bool,
lazy_wheel: bool,
verbosity: int,
legacy_resolver: bool,
) -> None:
super().__init__()
@ -255,6 +262,9 @@ class RequirementPreparer:
# How verbose should underlying tooling be?
self.verbosity = verbosity
# Are we using the legacy resolver?
self.legacy_resolver = legacy_resolver
# Memoized downloaded files, as mapping of url: path.
self._downloaded: Dict[str, str] = {}
@ -263,18 +273,28 @@ class RequirementPreparer:
def _log_preparing_link(self, req: InstallRequirement) -> None:
"""Provide context for the requirement being prepared."""
if req.link.is_file and not req.original_link_is_in_wheel_cache:
if req.link.is_file and not req.is_wheel_from_cache:
message = "Processing %s"
information = str(display_path(req.link.file_path))
else:
message = "Collecting %s"
information = str(req.req or req)
information = redact_auth_from_requirement(req.req) if req.req else str(req)
# If we used req.req, inject requirement source if available (this
# would already be included if we used req directly)
if req.req and req.comes_from:
if isinstance(req.comes_from, str):
comes_from: Optional[str] = req.comes_from
else:
comes_from = req.comes_from.from_path()
if comes_from:
information += f" (from {comes_from})"
if (message, information) != self._previous_requirement_header:
self._previous_requirement_header = (message, information)
logger.info(message, information)
if req.original_link_is_in_wheel_cache:
if req.is_wheel_from_cache:
with indent_log():
logger.info("Using cached %s", req.link.filename)
@ -299,21 +319,7 @@ class RequirementPreparer:
autodelete=True,
parallel_builds=parallel_builds,
)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
# TODO: this check is now probably dead code
if is_installable_dir(req.source_dir):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
"pre-existing build directory ({}). This is likely "
"due to a previous installation that failed . pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again.".format(req, req.source_dir)
)
req.ensure_pristine_source_checkout()
def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
# By the time this is called, the requirement's link should have
@ -338,7 +344,7 @@ class RequirementPreparer:
# a surprising hash mismatch in the future.
# file:/// URLs aren't pinnable, so don't complain about them
# not being pinned.
if req.original_link is None and not req.is_pinned:
if not req.is_direct and not req.is_pinned:
raise HashUnpinned()
# If known-good hashes are missing for this requirement,
@ -351,6 +357,11 @@ class RequirementPreparer:
self,
req: InstallRequirement,
) -> Optional[BaseDistribution]:
if self.legacy_resolver:
logger.debug(
"Metadata-only fetching is not used in the legacy resolver",
)
return None
if self.require_hashes:
logger.debug(
"Metadata-only fetching is not used as hash checking is required",
@ -371,7 +382,7 @@ class RequirementPreparer:
if metadata_link is None:
return None
assert req.req is not None
logger.info(
logger.verbose(
"Obtaining dependency information for %s from %s",
req.req,
metadata_link,
@ -396,7 +407,7 @@ class RequirementPreparer:
# NB: raw_name will fall back to the name from the install requirement if
# the Name: field is not present, but it's noted in the raw_name docstring
# that that should NEVER happen anyway.
if metadata_dist.raw_name != req.req.name:
if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
raise MetadataInconsistent(
req, "Name", req.req.name, metadata_dist.raw_name
)
@ -456,7 +467,19 @@ class RequirementPreparer:
for link, (filepath, _) in batch_download:
logger.debug("Downloading link %s to %s", link, filepath)
req = links_to_fully_download[link]
# Record the downloaded file path so wheel reqs can extract a Distribution
# in .get_dist().
req.local_file_path = filepath
# Record that the file is downloaded so we don't do it again in
# _prepare_linked_requirement().
self._downloaded[req.link.url] = filepath
# If this is an sdist, we need to unpack it after downloading, but the
# .source_dir won't be set up until we are in _prepare_linked_requirement().
# Add the downloaded archive to the install requirement to unpack after
# preparing the source dir.
if not req.is_wheel:
req.needs_unpacked_archive(Path(filepath))
# This step is necessary to ensure all lazy wheels are processed
# successfully by the 'download', 'wheel', and 'install' commands.
@ -475,7 +498,18 @@ class RequirementPreparer:
file_path = None
if self.download_dir is not None and req.link.is_wheel:
hashes = self._get_linked_req_hashes(req)
file_path = _check_download_dir(req.link, self.download_dir, hashes)
file_path = _check_download_dir(
req.link,
self.download_dir,
hashes,
# When a locally built wheel has been found in cache, we don't warn
# about re-downloading when the already downloaded wheel hash does
# not match. This is because the hash must be checked against the
# original link, not the cached link. In that case the already
# downloaded file will be removed and re-fetched from cache (which
# implies a hash check against the cache entry's origin.json).
warn_on_hash_mismatch=not req.is_wheel_from_cache,
)
if file_path is not None:
# The file is already available, so mark it as downloaded
@ -526,9 +560,35 @@ class RequirementPreparer:
assert req.link
link = req.link
self._ensure_link_req_src_dir(req, parallel_builds)
hashes = self._get_linked_req_hashes(req)
if hashes and req.is_wheel_from_cache:
assert req.download_info is not None
assert link.is_wheel
assert link.is_file
# We need to verify hashes, and we have found the requirement in the cache
# of locally built wheels.
if (
isinstance(req.download_info.info, ArchiveInfo)
and req.download_info.info.hashes
and hashes.has_one_of(req.download_info.info.hashes)
):
# At this point we know the requirement was built from a hashable source
# artifact, and we verified that the cache entry's hash of the original
# artifact matches one of the hashes we expect. We don't verify hashes
# against the cached wheel, because the wheel is not the original.
hashes = None
else:
logger.warning(
"The hashes of the source archive found in cache entry "
"don't match, ignoring cached built wheel "
"and re-downloading source."
)
req.link = req.cached_wheel_source_link
link = req.link
self._ensure_link_req_src_dir(req, parallel_builds)
if link.is_existing_dir():
local_file = None
elif link.url not in self._downloaded:
@ -561,12 +621,15 @@ class RequirementPreparer:
# Make sure we have a hash in download_info. If we got it as part of the
# URL, it will have been verified and we can rely on it. Otherwise we
# compute it from the downloaded file.
# FIXME: https://github.com/pypa/pip/issues/11943
if (
isinstance(req.download_info.info, ArchiveInfo)
and not req.download_info.info.hash
and not req.download_info.info.hashes
and local_file
):
hash = hash_file(local_file.path)[0].hexdigest()
# We populate info.hash for backward compatibility.
# This will automatically populate info.hashes.
req.download_info.info.hash = f"sha256={hash}"
# For use in later processing,

@ -91,14 +91,19 @@ def load_pyproject_toml(
# If we haven't worked out whether to use PEP 517 yet,
# and the user hasn't explicitly stated a preference,
# we do so if the project has a pyproject.toml file
# or if we cannot import setuptools.
# or if we cannot import setuptools or wheel.
# We fallback to PEP 517 when without setuptools,
# We fall back to PEP 517 when setuptools or the wheel package is missing,
# so setuptools can be installed as a default build backend.
# For more info see:
# https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
# https://github.com/pypa/pip/issues/8559
elif use_pep517 is None:
use_pep517 = has_pyproject or not importlib.util.find_spec("setuptools")
use_pep517 = (
has_pyproject
or not importlib.util.find_spec("setuptools")
or not importlib.util.find_spec("wheel")
)
# At this point, we know whether we're going to use PEP 517.
assert use_pep517 is not None

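importlib.util.find_spec reports whether a module can be found without importing it, which is how the fallback above detects a missing setuptools or wheel:

    import importlib.util

    has_setuptools = importlib.util.find_spec("setuptools") is not None
    has_wheel = importlib.util.find_spec("wheel") is not None
    print(has_setuptools, has_wheel)  # PEP 517 is used if either is False
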
@ -36,7 +36,6 @@ def _validate_requirements(
def install_given_reqs(
requirements: List[InstallRequirement],
install_options: List[str],
global_options: Sequence[str],
root: Optional[str],
home: Optional[str],
@ -71,7 +70,6 @@ def install_given_reqs(
try:
requirement.install(
install_options,
global_options,
root=root,
home=home,

@ -8,10 +8,11 @@ These are meant to be used elsewhere within pip to create instances of
InstallRequirement.
"""
import copy
import logging
import os
import re
from typing import Any, Dict, Optional, Set, Tuple, Union
from typing import Collection, Dict, List, Optional, Set, Tuple, Union
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
@ -57,6 +58,31 @@ def convert_extras(extras: Optional[str]) -> Set[str]:
return get_requirement("placeholder" + extras.lower()).extras
def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement:
"""
Returns a new requirement based on the given one, with the supplied extras. If the
given requirement already has extras, those are replaced (or dropped if no new extras
are given).
"""
match: Optional[re.Match[str]] = re.fullmatch(
# see https://peps.python.org/pep-0508/#complete-grammar
r"([\w\t .-]+)(\[[^\]]*\])?(.*)",
str(req),
flags=re.ASCII,
)
# ireq.req is a valid requirement so the regex should always match
assert (
match is not None
), f"regex match on requirement {req} failed, this should never happen"
pre: Optional[str] = match.group(1)
post: Optional[str] = match.group(3)
assert (
pre is not None and post is not None
), f"regex group selection for requirement {req} failed, this should never happen"
extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else ""
return Requirement(f"{pre}{extras}{post}")
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
"""Parses an editable requirement into:
- a requirement name
@ -201,15 +227,16 @@ def parse_req_from_editable(editable_req: str) -> RequirementParts:
def install_req_from_editable(
editable_req: str,
comes_from: Optional[Union[InstallRequirement, str]] = None,
*,
use_pep517: Optional[bool] = None,
isolated: bool = False,
options: Optional[Dict[str, Any]] = None,
global_options: Optional[List[str]] = None,
hash_options: Optional[Dict[str, List[str]]] = None,
constraint: bool = False,
user_supplied: bool = False,
permit_editable_wheels: bool = False,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
parts = parse_req_from_editable(editable_req)
return InstallRequirement(
@ -222,9 +249,8 @@ def install_req_from_editable(
constraint=constraint,
use_pep517=use_pep517,
isolated=isolated,
install_options=options.get("install_options", []) if options else [],
global_options=options.get("global_options", []) if options else [],
hash_options=options.get("hashes", {}) if options else {},
global_options=global_options,
hash_options=hash_options,
config_settings=config_settings,
extras=parts.extras,
)
@ -376,13 +402,15 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
def install_req_from_line(
name: str,
comes_from: Optional[Union[str, InstallRequirement]] = None,
*,
use_pep517: Optional[bool] = None,
isolated: bool = False,
options: Optional[Dict[str, Any]] = None,
global_options: Optional[List[str]] = None,
hash_options: Optional[Dict[str, List[str]]] = None,
constraint: bool = False,
line_source: Optional[str] = None,
user_supplied: bool = False,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
"""Creates an InstallRequirement from a name, which might be a
requirement, directory containing 'setup.py', filename, or URL.
@ -399,9 +427,8 @@ def install_req_from_line(
markers=parts.markers,
use_pep517=use_pep517,
isolated=isolated,
install_options=options.get("install_options", []) if options else [],
global_options=options.get("global_options", []) if options else [],
hash_options=options.get("hashes", {}) if options else {},
global_options=global_options,
hash_options=hash_options,
config_settings=config_settings,
constraint=constraint,
extras=parts.extras,
@ -415,7 +442,6 @@ def install_req_from_req_string(
isolated: bool = False,
use_pep517: Optional[bool] = None,
user_supplied: bool = False,
config_settings: Optional[Dict[str, str]] = None,
) -> InstallRequirement:
try:
req = get_requirement(req_string)
@ -445,7 +471,6 @@ def install_req_from_req_string(
isolated=isolated,
use_pep517=use_pep517,
user_supplied=user_supplied,
config_settings=config_settings,
)
@ -454,7 +479,7 @@ def install_req_from_parsed_requirement(
isolated: bool = False,
use_pep517: Optional[bool] = None,
user_supplied: bool = False,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
if parsed_req.is_editable:
req = install_req_from_editable(
@ -473,7 +498,14 @@ def install_req_from_parsed_requirement(
comes_from=parsed_req.comes_from,
use_pep517=use_pep517,
isolated=isolated,
options=parsed_req.options,
global_options=(
parsed_req.options.get("global_options", [])
if parsed_req.options
else []
),
hash_options=(
parsed_req.options.get("hashes", {}) if parsed_req.options else {}
),
constraint=parsed_req.constraint,
line_source=parsed_req.line_source,
user_supplied=user_supplied,
@ -493,9 +525,52 @@ def install_req_from_link_and_ireq(
markers=ireq.markers,
use_pep517=ireq.use_pep517,
isolated=ireq.isolated,
install_options=ireq.install_options,
global_options=ireq.global_options,
hash_options=ireq.hash_options,
config_settings=ireq.config_settings,
user_supplied=ireq.user_supplied,
)
def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement:
"""
Creates a new InstallRequirement using the given template but without
any extras. Sets the original requirement as the new one's parent
(comes_from).
"""
return InstallRequirement(
req=(
_set_requirement_extras(ireq.req, set()) if ireq.req is not None else None
),
comes_from=ireq,
editable=ireq.editable,
link=ireq.link,
markers=ireq.markers,
use_pep517=ireq.use_pep517,
isolated=ireq.isolated,
global_options=ireq.global_options,
hash_options=ireq.hash_options,
constraint=ireq.constraint,
extras=[],
config_settings=ireq.config_settings,
user_supplied=ireq.user_supplied,
permit_editable_wheels=ireq.permit_editable_wheels,
)
def install_req_extend_extras(
ireq: InstallRequirement,
extras: Collection[str],
) -> InstallRequirement:
"""
Returns a copy of an installation requirement with some additional extras.
Makes a shallow copy of the ireq object.
"""
result = copy.copy(ireq)
result.extras = {*ireq.extras, *extras}
result.req = (
_set_requirement_extras(ireq.req, result.extras)
if ireq.req is not None
else None
)
return result

@ -2,6 +2,7 @@
Requirements file parsing
"""
import logging
import optparse
import os
import re
@ -69,14 +70,16 @@ SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
# options to be passed to requirements
SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
cmdoptions.install_options,
cmdoptions.global_options,
cmdoptions.hash,
cmdoptions.config_settings,
]
# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
logger = logging.getLogger(__name__)
class ParsedRequirement:
def __init__(
@ -166,7 +169,6 @@ def handle_requirement_line(
line: ParsedLine,
options: Optional[optparse.Values] = None,
) -> ParsedRequirement:
# preserve for the nested code path
line_comes_from = "{} {} (line {})".format(
"-c" if line.constraint else "-r",
@ -211,6 +213,12 @@ def handle_option_line(
options: Optional[optparse.Values] = None,
session: Optional[PipSession] = None,
) -> None:
if opts.hashes:
logger.warning(
"%s line %s has --hash but no requirement, and will be ignored.",
filename,
lineno,
)
if options:
# percolate options upward

@ -1,6 +1,3 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import functools
import logging
import os
@ -8,8 +5,8 @@ import shutil
import sys
import uuid
import zipfile
from enum import Enum
from optparse import Values
from pathlib import Path
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
from pip._vendor.packaging.markers import Marker
@ -21,7 +18,7 @@ from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError, LegacyInstallFailure
from pip._internal.exceptions import InstallationError, PreviousBuildDirError
from pip._internal.locations import get_scheme
from pip._internal.metadata import (
BaseDistribution,
@ -40,15 +37,10 @@ from pip._internal.operations.build.metadata_legacy import (
from pip._internal.operations.install.editable_legacy import (
install_editable as install_editable_legacy,
)
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import LegacyInstallReason, deprecated
from pip._internal.utils.direct_url_helpers import (
direct_url_for_editable,
direct_url_from_link,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
ConfiguredBuildBackendHookCaller,
@ -56,11 +48,14 @@ from pip._internal.utils.misc import (
backup_dir,
display_path,
hide_url,
is_installable_dir,
redact_auth_from_requirement,
redact_auth_from_url,
)
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.unpacking import unpack_file
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs
@ -83,10 +78,10 @@ class InstallRequirement:
markers: Optional[Marker] = None,
use_pep517: Optional[bool] = None,
isolated: bool = False,
install_options: Optional[List[str]] = None,
*,
global_options: Optional[List[str]] = None,
hash_options: Optional[Dict[str, List[str]]] = None,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
constraint: bool = False,
extras: Collection[str] = (),
user_supplied: bool = False,
@ -98,7 +93,6 @@ class InstallRequirement:
self.constraint = constraint
self.editable = editable
self.permit_editable_wheels = permit_editable_wheels
self.legacy_install_reason: Optional[LegacyInstallReason] = None
# source_dir is the local directory where the linked requirement is
# located, or unpacked. In case unpacking is needed, creating and
@ -111,11 +105,17 @@ class InstallRequirement:
if link.is_file:
self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
# original_link is the direct URL that was provided by the user for the
# requirement, either directly or via a constraints file.
if link is None and req and req.url:
# PEP 508 URL requirement
link = Link(req.url)
self.link = self.original_link = link
self.original_link_is_in_wheel_cache = False
# When this InstallRequirement is a wheel obtained from the cache of locally
# built wheels, this is the source link corresponding to the cache entry, which
# was used to download and build the cached wheel.
self.cached_wheel_source_link: Optional[Link] = None
# Information about the location of the artifact that was downloaded. This
# property is guaranteed to be set in resolver results.
@ -129,7 +129,7 @@ class InstallRequirement:
if extras:
self.extras = extras
elif req:
self.extras = {safe_extra(extra) for extra in req.extras}
self.extras = req.extras
else:
self.extras = set()
if markers is None and req:
@ -146,7 +146,6 @@ class InstallRequirement:
# Set to True after successful installation
self.install_succeeded: Optional[bool] = None
# Supplied options
self.install_options = install_options if install_options else []
self.global_options = global_options if global_options else []
self.hash_options = hash_options if hash_options else {}
self.config_settings = config_settings
@ -185,9 +184,12 @@ class InstallRequirement:
# This requirement needs more preparation before it can be built
self.needs_more_preparation = False
# This requirement needs to be unpacked before it can be installed.
self._archive_source: Optional[Path] = None
def __str__(self) -> str:
if self.req:
s = str(self.req)
s = redact_auth_from_requirement(self.req)
if self.link:
s += " from {}".format(redact_auth_from_url(self.link.url))
elif self.link:
@ -246,15 +248,22 @@ class InstallRequirement:
@property
def specifier(self) -> SpecifierSet:
assert self.req is not None
return self.req.specifier
@property
def is_direct(self) -> bool:
"""Whether this requirement was specified as a direct URL."""
return self.original_link is not None
@property
def is_pinned(self) -> bool:
"""Return whether I am pinned to an exact version.
For example, some-package==1.2 is pinned; some-package>1.2 is not.
"""
specifiers = self.specifier
assert self.req is not None
specifiers = self.req.specifier
return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
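The pinned check above needs nothing pip-specific; a minimal standalone sketch of the same test, assuming the plain packaging distribution rather than pip's vendored copy:

from packaging.specifiers import SpecifierSet

def is_pinned(spec: str) -> bool:
    # Exactly one specifier, using == or === (PEP 440 pinning operators).
    specifiers = SpecifierSet(spec)
    return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

print(is_pinned("==1.2"))       # True
print(is_pinned(">1.2"))        # False
print(is_pinned(">=1.2,<2.0"))  # False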
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
@ -264,7 +273,12 @@ class InstallRequirement:
extras_requested = ("",)
if self.markers is not None:
return any(
self.markers.evaluate({"extra": extra}) for extra in extras_requested
self.markers.evaluate({"extra": extra})
# TODO: Remove these two variants when packaging is upgraded to
# support the marker comparison logic specified in PEP 685.
or self.markers.evaluate({"extra": safe_extra(extra)})
or self.markers.evaluate({"extra": canonicalize_name(extra)})
for extra in extras_requested
)
else:
return True
@ -295,8 +309,14 @@ class InstallRequirement:
"""
good_hashes = self.hash_options.copy()
link = self.link if trust_internet else self.original_link
if trust_internet:
link = self.link
elif self.is_direct and self.user_supplied:
link = self.original_link
else:
link = None
if link and link.hash:
assert link.hash_name is not None
good_hashes.setdefault(link.hash_name, []).append(link.hash)
return Hashes(good_hashes)
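For context, the link.hash_name / link.hash pair folded into good_hashes above typically originates from a URL fragment such as #sha256=&lt;digest&gt;. A rough standalone sketch of that shape, using a hypothetical URL and a plain dict in place of pip's Hashes class:

from urllib.parse import urlparse

url = "https://example.com/pkg-1.0.tar.gz#sha256=deadbeef"  # hypothetical
hash_name, _, digest = urlparse(url).fragment.partition("=")

good_hashes: dict = {}
if hash_name and digest:
    good_hashes.setdefault(hash_name, []).append(digest)
print(good_hashes)  # {'sha256': ['deadbeef']}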
@ -306,6 +326,7 @@ class InstallRequirement:
return None
s = str(self.req)
if self.comes_from:
comes_from: Optional[str]
if isinstance(self.comes_from, str):
comes_from = self.comes_from
else:
@ -337,7 +358,7 @@ class InstallRequirement:
# When parallel builds are enabled, add a UUID to the build directory
# name so multiple builds do not interfere with each other.
dir_name: str = canonicalize_name(self.name)
dir_name: str = canonicalize_name(self.req.name)
if parallel_builds:
dir_name = f"{dir_name}_{uuid.uuid4().hex}"
@ -380,6 +401,7 @@ class InstallRequirement:
)
def warn_on_mismatching_name(self) -> None:
assert self.req is not None
metadata_name = canonicalize_name(self.metadata["Name"])
if canonicalize_name(self.req.name) == metadata_name:
# Everything is fine.
@ -440,9 +462,16 @@ class InstallRequirement:
return False
return self.link.is_wheel
@property
def is_wheel_from_cache(self) -> bool:
# When True, it means that this InstallRequirement is a local wheel file in the
# cache of locally built wheels.
return self.cached_wheel_source_link is not None
# Things valid for sdists
@property
def unpacked_source_directory(self) -> str:
assert self.source_dir, f"No source dir for {self}"
return os.path.join(
self.source_dir, self.link and self.link.subdirectory_fragment or ""
)
@ -479,6 +508,15 @@ class InstallRequirement:
)
if pyproject_toml_data is None:
if self.config_settings:
deprecated(
reason=f"Config settings are ignored for project {self}.",
replacement=(
"to use --use-pep517 or add a "
"pyproject.toml file to the project"
),
gone_in="24.0",
)
self.use_pep517 = False
return
@ -520,7 +558,7 @@ class InstallRequirement:
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
Under legacy processing, call setup.py egg-info.
"""
assert self.source_dir
assert self.source_dir, f"No source dir for {self}"
details = self.name or f"from {self.link}"
if self.use_pep517:
@ -569,8 +607,10 @@ class InstallRequirement:
if self.metadata_directory:
return get_directory_distribution(self.metadata_directory)
elif self.local_file_path and self.is_wheel:
assert self.req is not None
return get_wheel_distribution(
FilesystemWheel(self.local_file_path), canonicalize_name(self.name)
FilesystemWheel(self.local_file_path),
canonicalize_name(self.req.name),
)
raise AssertionError(
f"InstallRequirement {self} has no metadata directory and no wheel: "
@ -578,9 +618,9 @@ class InstallRequirement:
)
def assert_source_matches_version(self) -> None:
assert self.source_dir
assert self.source_dir, f"No source dir for {self}"
version = self.metadata["version"]
if self.req.specifier and version not in self.req.specifier:
if self.req and self.req.specifier and version not in self.req.specifier:
logger.warning(
"Requested %s, but installing version %s",
self,
@ -617,6 +657,27 @@ class InstallRequirement:
parallel_builds=parallel_builds,
)
def needs_unpacked_archive(self, archive_source: Path) -> None:
assert self._archive_source is None
self._archive_source = archive_source
def ensure_pristine_source_checkout(self) -> None:
"""Ensure the source directory has not yet been built in."""
assert self.source_dir is not None
if self._archive_source is not None:
unpack_file(str(self._archive_source), self.source_dir)
elif is_installable_dir(self.source_dir):
# If a checkout exists, it's unwise to keep going.
# Version inconsistencies are logged later, but do not fail
# the installation.
raise PreviousBuildDirError(
f"pip can't proceed with requirements '{self}' due to a "
f"pre-existing build directory ({self.source_dir}). This is likely "
"due to a previous installation that failed . pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again."
)
# For editable installations
def update_editable(self) -> None:
if not self.link:
@ -673,9 +734,10 @@ class InstallRequirement:
name = name.replace(os.path.sep, "/")
return name
assert self.req is not None
path = os.path.join(parentdir, path)
name = _clean_zip_name(path, rootdir)
return self.name + "/" + name
return self.req.name + "/" + name
def archive(self, build_dir: Optional[str]) -> None:
"""Saves archive to provided build_dir.
@ -746,7 +808,6 @@ class InstallRequirement:
def install(
self,
install_options: List[str],
global_options: Optional[Sequence[str]] = None,
root: Optional[str] = None,
home: Optional[str] = None,
@ -755,8 +816,9 @@ class InstallRequirement:
use_user_site: bool = False,
pycompile: bool = True,
) -> None:
assert self.req is not None
scheme = get_scheme(
self.name,
self.req.name,
user=use_user_site,
home=home,
root=root,
@ -764,15 +826,13 @@ class InstallRequirement:
prefix=prefix,
)
global_options = global_options if global_options is not None else []
if self.editable and not self.is_wheel:
install_editable_legacy(
install_options,
global_options,
global_options=global_options if global_options is not None else [],
prefix=prefix,
home=home,
use_user_site=use_user_site,
name=self.name,
name=self.req.name,
setup_py_path=self.setup_py_path,
isolated=self.isolated,
build_env=self.build_env,
@ -781,82 +841,23 @@ class InstallRequirement:
self.install_succeeded = True
return
if self.is_wheel:
assert self.local_file_path
direct_url = None
# TODO this can be refactored to direct_url = self.download_info
if self.editable:
direct_url = direct_url_for_editable(self.unpacked_source_directory)
elif self.original_link:
direct_url = direct_url_from_link(
self.original_link,
self.source_dir,
self.original_link_is_in_wheel_cache,
)
install_wheel(
self.name,
self.local_file_path,
scheme=scheme,
req_description=str(self.req),
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=direct_url,
requested=self.user_supplied,
)
self.install_succeeded = True
return
# TODO: Why don't we do this for editable installs?
# Extend the list of global and install options passed on to
# the setup.py call with the ones from the requirements file.
# Options specified in requirements file override those
# specified on the command line, since the last option given
# to setup.py is the one that is used.
global_options = list(global_options) + self.global_options
install_options = list(install_options) + self.install_options
try:
if (
self.legacy_install_reason is not None
and self.legacy_install_reason.emit_before_install
):
self.legacy_install_reason.emit_deprecation(self.name)
success = install_legacy(
install_options=install_options,
global_options=global_options,
root=root,
home=home,
prefix=prefix,
use_user_site=use_user_site,
pycompile=pycompile,
scheme=scheme,
setup_py_path=self.setup_py_path,
isolated=self.isolated,
req_name=self.name,
build_env=self.build_env,
unpacked_source_directory=self.unpacked_source_directory,
req_description=str(self.req),
)
except LegacyInstallFailure as exc:
self.install_succeeded = False
raise exc
except Exception:
self.install_succeeded = True
raise
self.install_succeeded = success
if (
success
and self.legacy_install_reason is not None
and self.legacy_install_reason.emit_after_success
):
self.legacy_install_reason.emit_deprecation(self.name)
assert self.is_wheel
assert self.local_file_path
install_wheel(
self.req.name,
self.local_file_path,
scheme=scheme,
req_description=str(self.req),
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=self.download_info if self.is_direct else None,
requested=self.user_supplied,
)
self.install_succeeded = True
def check_invalid_constraint_type(req: InstallRequirement) -> str:
# Check for unsupported forms
problem = ""
if not req.name:
@ -893,54 +894,21 @@ def _has_option(options: Values, reqs: List[InstallRequirement], option: str) ->
return False
def _install_option_ignored(
install_options: List[str], reqs: List[InstallRequirement]
) -> bool:
for req in reqs:
if (install_options or req.install_options) and not req.use_pep517:
return False
return True
class LegacySetupPyOptionsCheckMode(Enum):
INSTALL = 1
WHEEL = 2
DOWNLOAD = 3
def check_legacy_setup_py_options(
options: Values,
reqs: List[InstallRequirement],
mode: LegacySetupPyOptionsCheckMode,
) -> None:
has_install_options = _has_option(options, reqs, "install_options")
has_build_options = _has_option(options, reqs, "build_options")
has_global_options = _has_option(options, reqs, "global_options")
legacy_setup_py_options_present = (
has_install_options or has_build_options or has_global_options
)
if not legacy_setup_py_options_present:
return
options.format_control.disallow_binaries()
logger.warning(
"Implying --no-binary=:all: due to the presence of "
"--build-option / --global-option / --install-option. "
"Consider using --config-settings for more flexibility.",
)
if mode == LegacySetupPyOptionsCheckMode.INSTALL and has_install_options:
if _install_option_ignored(options.install_options, reqs):
logger.warning(
"Ignoring --install-option when building using PEP 517",
)
else:
deprecated(
reason=(
"--install-option is deprecated because "
"it forces pip to use the 'setup.py install' "
"command which is itself deprecated."
),
issue=11358,
replacement="to use --config-settings",
gone_in="23.1",
)
if has_build_options or has_global_options:
deprecated(
reason="--build-option and --global-option are deprecated.",
issue=11859,
replacement="to use --config-settings",
gone_in="24.0",
)
logger.warning(
"Implying --no-binary=:all: due to the presence of "
"--build-option / --global-option. "
)
options.format_control.disallow_binaries()

@ -2,9 +2,12 @@ import logging
from collections import OrderedDict
from typing import Dict, List
from pip._vendor.packaging.specifiers import LegacySpecifier
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import LegacyVersion
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.deprecation import deprecated
logger = logging.getLogger(__name__)
@ -80,3 +83,37 @@ class RequirementSet:
for install_req in self.all_requirements
if not install_req.constraint and not install_req.satisfied_by
]
def warn_legacy_versions_and_specifiers(self) -> None:
for req in self.requirements_to_install:
version = req.get_dist().version
if isinstance(version, LegacyVersion):
deprecated(
reason=(
f"pip has selected the non standard version {version} "
f"of {req}. In the future this version will be "
f"ignored as it isn't standard compliant."
),
replacement=(
"set or update constraints to select another version "
"or contact the package author to fix the version number"
),
issue=12063,
gone_in="24.0",
)
for dep in req.get_dist().iter_dependencies():
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
deprecated(
reason=(
f"pip has selected {req} {version} which has non "
f"standard dependency specifier {dep}. "
f"In the future this version of {req} will be "
f"ignored as it isn't standard compliant."
),
replacement=(
"set or update constraints to select another version "
"or contact the package author to fix the version number"
),
issue=12063,
gone_in="24.0",
)
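The LegacyVersion / LegacySpecifier types used by these checks only exist in the vendored packaging 21.x line. A small sketch of the detection they perform, assuming packaging &lt; 22 where parse() can still return LegacyVersion:

from packaging.version import LegacyVersion, parse  # packaging < 22 only

for candidate in ("1.2.3", "2004d", "1.0-snapshot"):
    v = parse(candidate)
    if isinstance(v, LegacyVersion):
        # "2004d" and "1.0-snapshot" are not PEP 440 compliant
        print(f"{candidate!r} is a non-standard version")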

@ -11,8 +11,9 @@ from pip._internal.metadata import BaseDistribution
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.egg_link import egg_link_path_from_location
from pip._internal.utils.logging import getLogger, indent_log
from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree
from pip._internal.utils.misc import ask, normalize_path, renames, rmtree
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
from pip._internal.utils.virtualenv import running_under_virtualenv
logger = getLogger(__name__)
@ -273,7 +274,7 @@ class StashedUninstallPathSet:
def commit(self) -> None:
"""Commits the uninstall by removing stashed files."""
for _, save_dir in self._save_dirs.items():
for save_dir in self._save_dirs.values():
save_dir.cleanup()
self._moves = []
self._save_dirs = {}
@ -312,6 +313,10 @@ class UninstallPathSet:
self._pth: Dict[str, UninstallPthEntries] = {}
self._dist = dist
self._moved_paths = StashedUninstallPathSet()
# Create local cache of normalize_path results. Creating an UninstallPathSet
# can result in hundreds/thousands of redundant calls to normalize_path with
# the same args, which hurts performance.
self._normalize_path_cached = functools.lru_cache()(normalize_path)
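The pattern used here, wrapping an existing function with functools.lru_cache() at construction time rather than decorating it at module level, ties the cache's lifetime to the instance. A minimal sketch of the idea with a hypothetical class:

import functools
import os.path

class PathSet:
    def __init__(self) -> None:
        # The cache is bound to this instance and garbage-collected with it,
        # unlike a module-level @functools.lru_cache decoration.
        self._normcase_cached = functools.lru_cache()(os.path.normcase)

    def add(self, path: str) -> str:
        return self._normcase_cached(path)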
def _permitted(self, path: str) -> bool:
"""
@ -319,14 +324,17 @@ class UninstallPathSet:
remove/modify, False otherwise.
"""
return is_local(path)
# aka is_local, but caching normalized sys.prefix
if not running_under_virtualenv():
return True
return path.startswith(self._normalize_path_cached(sys.prefix))
def add(self, path: str) -> None:
head, tail = os.path.split(path)
# we normalize the head to resolve parent directory symlinks, but not
# the tail, since we only want to uninstall symlinks, not their targets
path = os.path.join(normalize_path(head), os.path.normcase(tail))
path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail))
if not os.path.exists(path):
return
@ -341,7 +349,7 @@ class UninstallPathSet:
self.add(cache_from_source(path))
def add_pth(self, pth_file: str, entry: str) -> None:
pth_file = normalize_path(pth_file)
pth_file = self._normalize_path_cached(pth_file)
if self._permitted(pth_file):
if pth_file not in self._pth:
self._pth[pth_file] = UninstallPthEntries(pth_file)
@ -531,12 +539,14 @@ class UninstallPathSet:
# above, so this only covers the setuptools-style editable.
with open(develop_egg_link) as fh:
link_pointer = os.path.normcase(fh.readline().strip())
normalized_link_pointer = normalize_path(link_pointer)
normalized_link_pointer = paths_to_remove._normalize_path_cached(
link_pointer
)
assert os.path.samefile(
normalized_link_pointer, normalized_dist_location
), (
f"Egg-link {link_pointer} does not match installed location of "
f"{dist.raw_name} (at {dist_location})"
f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
f"installed location of {dist.raw_name} (at {dist_location})"
)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(

@ -431,12 +431,12 @@ class Resolver(BaseResolver):
if cache_entry is not None:
logger.debug("Using cached wheel link: %s", cache_entry.link)
if req.link is req.original_link and cache_entry.persistent:
req.original_link_is_in_wheel_cache = True
req.cached_wheel_source_link = req.link
if cache_entry.origin is not None:
req.download_info = cache_entry.origin
else:
# Legacy cache entry that does not have origin.json.
# download_info may miss the archive_info.hash field.
# download_info may miss the archive_info.hashes field.
req.download_info = direct_url_from_link(
req.link, link_is_in_wheel_cache=cache_entry.persistent
)

@ -1,7 +1,7 @@
from typing import FrozenSet, Iterable, Optional, Tuple, Union
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.utils import NormalizedName
from pip._vendor.packaging.version import LegacyVersion, Version
from pip._internal.models.link import Link, links_equivalent
@ -12,11 +12,11 @@ CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
CandidateVersion = Union[LegacyVersion, Version]
def format_name(project: str, extras: FrozenSet[str]) -> str:
def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
if not extras:
return project
canonical_extras = sorted(canonicalize_name(e) for e in extras)
return "{}[{}]".format(project, ",".join(canonical_extras))
extras_expr = ",".join(sorted(extras))
return f"{project}[{extras_expr}]"
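Since callers now pass pre-canonicalized names and extras, the per-call canonicalize_name pass becomes unnecessary while the output stays the same; a small sketch with hypothetical inputs:

from packaging.utils import canonicalize_name

extras = frozenset({"PDF", "tests"})
# old behaviour: normalize at format time
old = "pkg[{}]".format(",".join(sorted(canonicalize_name(e) for e in extras)))
# new behaviour: inputs are already normalized, so formatting is a plain join
new = "pkg[{}]".format(",".join(sorted({"pdf", "tests"})))
print(old == new)  # True -> both give 'pkg[pdf,tests]'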
class Constraint:

@ -65,15 +65,13 @@ def make_install_req_from_link(
use_pep517=template.use_pep517,
isolated=template.isolated,
constraint=template.constraint,
options=dict(
install_options=template.install_options,
global_options=template.global_options,
hashes=template.hash_options,
),
global_options=template.global_options,
hash_options=template.hash_options,
config_settings=template.config_settings,
)
ireq.original_link = template.original_link
ireq.link = link
ireq.extras = template.extras
return ireq
@ -81,7 +79,7 @@ def make_install_req_from_editable(
link: Link, template: InstallRequirement
) -> InstallRequirement:
assert template.editable, "template not editable"
return install_req_from_editable(
ireq = install_req_from_editable(
link.url,
user_supplied=template.user_supplied,
comes_from=template.comes_from,
@ -89,13 +87,12 @@ def make_install_req_from_editable(
isolated=template.isolated,
constraint=template.constraint,
permit_editable_wheels=template.permit_editable_wheels,
options=dict(
install_options=template.install_options,
global_options=template.global_options,
hashes=template.hash_options,
),
global_options=template.global_options,
hash_options=template.hash_options,
config_settings=template.config_settings,
)
ireq.extras = template.extras
return ireq
def _make_install_req_from_dist(
@ -114,11 +111,8 @@ def _make_install_req_from_dist(
use_pep517=template.use_pep517,
isolated=template.isolated,
constraint=template.constraint,
options=dict(
install_options=template.install_options,
global_options=template.global_options,
hashes=template.hash_options,
),
global_options=template.global_options,
hash_options=template.hash_options,
config_settings=template.config_settings,
)
ireq.satisfied_by = dist
@ -246,7 +240,7 @@ class _InstallRequirementBackedCandidate(Candidate):
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
requires = self.dist.iter_dependencies() if with_requires else ()
for r in requires:
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
yield self._factory.make_requires_python_requirement(self.dist.requires_python)
def get_install_requirement(self) -> Optional[InstallRequirement]:
@ -265,7 +259,7 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
version: Optional[CandidateVersion] = None,
) -> None:
source_link = link
cache_entry = factory.get_wheel_cache_entry(link, name)
cache_entry = factory.get_wheel_cache_entry(source_link, name)
if cache_entry is not None:
logger.debug("Using cached wheel link: %s", cache_entry.link)
link = cache_entry.link
@ -283,13 +277,15 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
)
if cache_entry is not None:
assert ireq.link.is_wheel
assert ireq.link.is_file
if cache_entry.persistent and template.link is template.original_link:
ireq.original_link_is_in_wheel_cache = True
ireq.cached_wheel_source_link = source_link
if cache_entry.origin is not None:
ireq.download_info = cache_entry.origin
else:
# Legacy cache entry that does not have origin.json.
# download_info may miss the archive_info.hash field.
# download_info may miss the archive_info.hashes field.
ireq.download_info = direct_url_from_link(
source_link, link_is_in_wheel_cache=cache_entry.persistent
)
@ -345,6 +341,7 @@ class AlreadyInstalledCandidate(Candidate):
self.dist = dist
self._ireq = _make_install_req_from_dist(dist, template)
self._factory = factory
self._version = None
# This is just logging some messages, so we can do it eagerly.
# The returned dist would be exactly the same as self.dist because we
@ -380,7 +377,9 @@ class AlreadyInstalledCandidate(Candidate):
@property
def version(self) -> CandidateVersion:
return self.dist.version
if self._version is None:
self._version = self.dist.version
return self._version
@property
def is_editable(self) -> bool:
@ -393,7 +392,7 @@ class AlreadyInstalledCandidate(Candidate):
if not with_requires:
return
for r in self.dist.iter_dependencies():
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
def get_install_requirement(self) -> Optional[InstallRequirement]:
return None
@ -428,9 +427,28 @@ class ExtrasCandidate(Candidate):
self,
base: BaseCandidate,
extras: FrozenSet[str],
*,
comes_from: Optional[InstallRequirement] = None,
) -> None:
"""
:param comes_from: the InstallRequirement that led to this candidate if it
differs from the base's InstallRequirement. This will often be the
case in the sense that this candidate's requirement has the extras
while the base's does not. Unlike the InstallRequirement backed
candidates, this requirement is used solely for reporting purposes;
it does not do any legwork.
"""
self.base = base
self.extras = extras
self.extras = frozenset(canonicalize_name(e) for e in extras)
# If any extras are requested in their non-normalized forms, keep track
# of their raw values. This is needed when we look up dependencies
# since PEP 685 has not been implemented for marker-matching, and using
# the non-normalized extra for lookup ensures the user can select a
# non-normalized extra in a package with its non-normalized form.
# TODO: Remove this attribute when packaging is upgraded to support the
# marker comparison logic specified in PEP 685.
self._unnormalized_extras = extras.difference(self.extras)
self._comes_from = comes_from if comes_from is not None else self.base._ireq
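A short sketch of the normalization bookkeeping done in this constructor; canonicalize_name lowercases a name and collapses runs of -, _, and . into a single -:

from packaging.utils import canonicalize_name

requested = frozenset({"PDF.Support", "tests"})
normalized = frozenset(canonicalize_name(e) for e in requested)
unnormalized = requested.difference(normalized)

print(sorted(normalized))    # ['pdf-support', 'tests']
print(sorted(unnormalized))  # ['PDF.Support'] -- raw spelling kept for marker lookups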
def __str__(self) -> str:
name, rest = str(self.base).split(" ", 1)
@ -481,6 +499,50 @@ class ExtrasCandidate(Candidate):
def source_link(self) -> Optional[Link]:
return self.base.source_link
def _warn_invalid_extras(
self,
requested: FrozenSet[str],
valid: FrozenSet[str],
) -> None:
"""Emit warnings for invalid extras being requested.
This emits a warning for each requested extra that is not in the
candidate's ``Provides-Extra`` list.
"""
invalid_extras_to_warn = frozenset(
extra
for extra in requested
if extra not in valid
# If an extra is requested in an unnormalized form, skip warning
# about the normalized form being missing.
and extra in self.extras
)
if not invalid_extras_to_warn:
return
for extra in sorted(invalid_extras_to_warn):
logger.warning(
"%s %s does not provide the extra '%s'",
self.base.name,
self.version,
extra,
)
def _calculate_valid_requested_extras(self) -> FrozenSet[str]:
"""Get a list of valid extras requested by this candidate.
The user (or upstream dependent) may have specified extras that the
candidate doesn't support. Any unsupported extras are dropped, and each
causes a warning to be logged here.
"""
requested_extras = self.extras.union(self._unnormalized_extras)
valid_extras = frozenset(
extra
for extra in requested_extras
if self.base.dist.is_extra_provided(extra)
)
self._warn_invalid_extras(requested_extras, valid_extras)
return valid_extras
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
factory = self.base._factory
@ -490,24 +552,13 @@ class ExtrasCandidate(Candidate):
if not with_requires:
return
# The user may have specified extras that the candidate doesn't
# support. We ignore any unsupported extras here.
valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
for extra in sorted(invalid_extras):
logger.warning(
"%s %s does not provide the extra '%s'",
self.base.name,
self.version,
extra,
)
valid_extras = self._calculate_valid_requested_extras()
for r in self.base.dist.iter_dependencies(valid_extras):
requirement = factory.make_requirement_from_spec(
str(r), self.base._ireq, valid_extras
yield from factory.make_requirements_from_spec(
str(r),
self._comes_from,
valid_extras,
)
if requirement:
yield requirement
def get_install_requirement(self) -> Optional[InstallRequirement]:
# We don't return anything here, because we always

@ -62,6 +62,7 @@ from .requirements import (
ExplicitRequirement,
RequiresPythonRequirement,
SpecifierRequirement,
SpecifierWithoutExtrasRequirement,
UnsatisfiableRequirement,
)
@ -112,7 +113,7 @@ class Factory:
self._editable_candidate_cache: Cache[EditableCandidate] = {}
self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
self._extras_candidate_cache: Dict[
Tuple[int, FrozenSet[str]], ExtrasCandidate
Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
] = {}
if not ignore_installed:
@ -132,19 +133,23 @@ class Factory:
if not link.is_wheel:
return
wheel = Wheel(link.filename)
if wheel.supported(self._finder.target_python.get_tags()):
if wheel.supported(self._finder.target_python.get_unsorted_tags()):
return
msg = f"{link.filename} is not a supported wheel on this platform."
raise UnsupportedWheel(msg)
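get_unsorted_tags() is pip-internal, but the compatibility test it feeds can be sketched with public packaging APIs, using a hypothetical universal wheel filename:

from packaging.tags import sys_tags
from packaging.utils import parse_wheel_filename

_, _, _, wheel_tags = parse_wheel_filename("pkg-1.0-py3-none-any.whl")
supported = any(tag in wheel_tags for tag in sys_tags())
print(supported)  # True for a py3-none-any wheel on any CPython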
def _make_extras_candidate(
self, base: BaseCandidate, extras: FrozenSet[str]
self,
base: BaseCandidate,
extras: FrozenSet[str],
*,
comes_from: Optional[InstallRequirement] = None,
) -> ExtrasCandidate:
cache_key = (id(base), extras)
cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras))
try:
candidate = self._extras_candidate_cache[cache_key]
except KeyError:
candidate = ExtrasCandidate(base, extras)
candidate = ExtrasCandidate(base, extras, comes_from=comes_from)
self._extras_candidate_cache[cache_key] = candidate
return candidate
@ -161,7 +166,7 @@ class Factory:
self._installed_candidate_cache[dist.canonical_name] = base
if not extras:
return base
return self._make_extras_candidate(base, extras)
return self._make_extras_candidate(base, extras, comes_from=template)
def _make_candidate_from_link(
self,
@ -223,7 +228,7 @@ class Factory:
if not extras:
return base
return self._make_extras_candidate(base, extras)
return self._make_extras_candidate(base, extras, comes_from=template)
def _iter_found_candidates(
self,
@ -385,16 +390,21 @@ class Factory:
if ireq is not None:
ireqs.append(ireq)
# If the current identifier contains extras, add explicit candidates
# from entries from extra-less identifier.
# If the current identifier contains extras, add requires and explicit
# candidates from entries from extra-less identifier.
with contextlib.suppress(InvalidRequirement):
parsed_requirement = get_requirement(identifier)
explicit_candidates.update(
self._iter_explicit_candidates_from_base(
requirements.get(parsed_requirement.name, ()),
frozenset(parsed_requirement.extras),
),
)
if parsed_requirement.name != identifier:
explicit_candidates.update(
self._iter_explicit_candidates_from_base(
requirements.get(parsed_requirement.name, ()),
frozenset(parsed_requirement.extras),
),
)
for req in requirements.get(parsed_requirement.name, []):
_, ireq = req.get_candidate_lookup()
if ireq is not None:
ireqs.append(ireq)
# Add explicit candidates from constraints. We only do this if there are
# known ireqs, which represent requirements not already explicit. If
@ -437,37 +447,49 @@ class Factory:
and all(req.is_satisfied_by(c) for req in requirements[identifier])
)
def _make_requirement_from_install_req(
def _make_requirements_from_install_req(
self, ireq: InstallRequirement, requested_extras: Iterable[str]
) -> Optional[Requirement]:
) -> Iterator[Requirement]:
"""
Returns requirement objects associated with the given InstallRequirement. In
most cases this will be a single object but the following special cases exist:
- the InstallRequirement has markers that do not apply -> result is empty
- the InstallRequirement has both a constraint and extras -> result is split
into two requirement objects: one with the constraint and one with the
extra. This allows centralized constraint handling for the base,
resulting in fewer candidate rejections.
"""
if not ireq.match_markers(requested_extras):
logger.info(
"Ignoring %s: markers '%s' don't match your environment",
ireq.name,
ireq.markers,
)
return None
if not ireq.link:
return SpecifierRequirement(ireq)
self._fail_if_link_is_unsupported_wheel(ireq.link)
cand = self._make_candidate_from_link(
ireq.link,
extras=frozenset(ireq.extras),
template=ireq,
name=canonicalize_name(ireq.name) if ireq.name else None,
version=None,
)
if cand is None:
# There's no way we can satisfy a URL requirement if the underlying
# candidate fails to build. An unnamed URL must be user-supplied, so
# we fail eagerly. If the URL is named, an unsatisfiable requirement
# can make the resolver do the right thing, either backtrack (and
# maybe find some other requirement that's buildable) or raise a
# ResolutionImpossible eventually.
if not ireq.name:
raise self._build_failures[ireq.link]
return UnsatisfiableRequirement(canonicalize_name(ireq.name))
return self.make_requirement_from_candidate(cand)
elif not ireq.link:
if ireq.extras and ireq.req is not None and ireq.req.specifier:
yield SpecifierWithoutExtrasRequirement(ireq)
yield SpecifierRequirement(ireq)
else:
self._fail_if_link_is_unsupported_wheel(ireq.link)
cand = self._make_candidate_from_link(
ireq.link,
extras=frozenset(ireq.extras),
template=ireq,
name=canonicalize_name(ireq.name) if ireq.name else None,
version=None,
)
if cand is None:
# There's no way we can satisfy a URL requirement if the underlying
# candidate fails to build. An unnamed URL must be user-supplied, so
# we fail eagerly. If the URL is named, an unsatisfiable requirement
# can make the resolver do the right thing, either backtrack (and
# maybe find some other requirement that's buildable) or raise a
# ResolutionImpossible eventually.
if not ireq.name:
raise self._build_failures[ireq.link]
yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
else:
yield self.make_requirement_from_candidate(cand)
def collect_root_requirements(
self, root_ireqs: List[InstallRequirement]
@ -488,15 +510,27 @@ class Factory:
else:
collected.constraints[name] = Constraint.from_ireq(ireq)
else:
req = self._make_requirement_from_install_req(
ireq,
requested_extras=(),
reqs = list(
self._make_requirements_from_install_req(
ireq,
requested_extras=(),
)
)
if req is None:
if not reqs:
continue
if ireq.user_supplied and req.name not in collected.user_requested:
collected.user_requested[req.name] = i
collected.requirements.append(req)
template = reqs[0]
if ireq.user_supplied and template.name not in collected.user_requested:
collected.user_requested[template.name] = i
collected.requirements.extend(reqs)
# Put requirements with extras at the end of the root requires. This does not
# affect resolvelib's picking preference but it does affect its initial criteria
# population: by putting extras at the end we enable the candidate finder to
# present resolvelib with a smaller set of candidates, already
# taking into account any non-transient constraints on the associated base. This
# means resolvelib will have fewer candidates to visit and reject.
# Python's list sort is stable, meaning relative order is kept for objects with
# the same key.
collected.requirements.sort(key=lambda r: r.name != r.project_name)
return collected
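The sort above relies on Python's sort stability with a boolean key: items whose name differs from project_name (i.e. requirements carrying extras) move to the end while relative order within each group is preserved. A toy illustration:

names = ["a[x]", "b", "c[y]", "d"]
names.sort(key=lambda n: "[" in n)  # False sorts before True
print(names)  # ['b', 'd', 'a[x]', 'c[y]']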
def make_requirement_from_candidate(
@ -504,14 +538,23 @@ class Factory:
) -> ExplicitRequirement:
return ExplicitRequirement(candidate)
def make_requirement_from_spec(
def make_requirements_from_spec(
self,
specifier: str,
comes_from: Optional[InstallRequirement],
requested_extras: Iterable[str] = (),
) -> Optional[Requirement]:
) -> Iterator[Requirement]:
"""
Returns requirement objects associated with the given specifier. In most cases
this will be a single object but the following special cases exist:
- the specifier has markers that do not apply -> result is empty
- the specifier has both a constraint and extras -> result is split
into two requirement objects: one with the constraint and one with the
extra. This allows centralized constraint handling for the base,
resulting in fewer candidate rejections.
"""
ireq = self._make_install_req_from_spec(specifier, comes_from)
return self._make_requirement_from_install_req(ireq, requested_extras)
return self._make_requirements_from_install_req(ireq, requested_extras)
def make_requires_python_requirement(
self,
@ -535,7 +578,7 @@ class Factory:
hash mismatches. Furthermore, cached wheels at present have
nondeterministic contents due to file modification times.
"""
if self._wheel_cache is None or self.preparer.require_hashes:
if self._wheel_cache is None:
return None
return self._wheel_cache.get_cache_entry(
link=link,
@ -603,8 +646,26 @@ class Factory:
cands = self._finder.find_all_candidates(req.project_name)
skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
versions = [str(v) for v in sorted({c.version for c in cands})]
versions_set: Set[CandidateVersion] = set()
yanked_versions_set: Set[CandidateVersion] = set()
for c in cands:
is_yanked = c.link.is_yanked if c.link else False
if is_yanked:
yanked_versions_set.add(c.version)
else:
versions_set.add(c.version)
versions = [str(v) for v in sorted(versions_set)]
yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
if yanked_versions:
# Saying "version X is yanked" isn't entirely accurate.
# https://github.com/pypa/pip/issues/11745#issuecomment-1402805842
logger.critical(
"Ignored the following yanked versions: %s",
", ".join(yanked_versions) or "none",
)
if skipped_by_requires_python:
logger.critical(
"Ignored the following versions that require a different python "
@ -632,7 +693,6 @@ class Factory:
e: "ResolutionImpossible[Requirement, Candidate]",
constraints: Dict[str, Constraint],
) -> InstallationError:
assert e.causes, "Installation error reported with no cause"
# If one of the things we can't solve is "we need Python X.Y",

@ -104,7 +104,7 @@ class PipProvider(_ProviderBase):
def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
return requirement_or_candidate.name
def get_preference( # type: ignore
def get_preference(
self,
identifier: str,
resolutions: Mapping[str, Candidate],
@ -124,14 +124,29 @@ class PipProvider(_ProviderBase):
* If equal, prefer if any requirement is "pinned", i.e. contains
operator ``===`` or ``==``.
* If equal, calculate an approximate "depth" and resolve requirements
closer to the user-specified requirements first.
closer to the user-specified requirements first. If the depth cannot
be determined (e.g., due to no matching parents), it is considered
infinite.
* Order user-specified requirements by the order they are specified.
* If equal, prefers "non-free" requirements, i.e. contains at least one
operator, such as ``>=`` or ``<``.
* If equal, order alphabetically for consistency (helps debuggability).
"""
lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
candidate, ireqs = zip(*lookups)
try:
next(iter(information[identifier]))
except StopIteration:
# There is no information for this identifier, so there are no known
# candidates.
has_information = False
else:
has_information = True
if has_information:
lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
candidate, ireqs = zip(*lookups)
else:
candidate, ireqs = None, ()
operators = [
specifier.operator
for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
@ -146,11 +161,14 @@ class PipProvider(_ProviderBase):
requested_order: Union[int, float] = self._user_requested[identifier]
except KeyError:
requested_order = math.inf
parent_depths = (
self._known_depths[parent.name] if parent is not None else 0.0
for _, parent in information[identifier]
)
inferred_depth = min(d for d in parent_depths) + 1.0
if has_information:
parent_depths = (
self._known_depths[parent.name] if parent is not None else 0.0
for _, parent in information[identifier]
)
inferred_depth = min(d for d in parent_depths) + 1.0
else:
inferred_depth = math.inf
else:
inferred_depth = 1.0
self._known_depths[identifier] = inferred_depth
@ -161,16 +179,6 @@ class PipProvider(_ProviderBase):
# free, so we always do it first to avoid needless work if it fails.
requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
# HACK: Setuptools has a very long and solid backward compatibility
# track record, and extremely few projects would request a narrow,
# non-recent version range of it since that would break a lot of things.
# (Most projects specify it only to request an installer feature,
# which does not work, but that's another topic.) Intentionally
# delaying Setuptools helps reduce branches the resolver has to check.
# This serves as a temporary fix for issues like "apache-airflow[all]"
# while we work on "proper" branch pruning techniques.
delay_this = identifier == "setuptools"
# Prefer the causes of backtracking on the assumption that the problem
# resolving the dependency tree is related to the failures that caused
# the backtracking
@ -178,7 +186,6 @@ class PipProvider(_ProviderBase):
return (
not requires_python,
delay_this,
not direct,
not pinned,
not backtrack_cause,

@ -11,16 +11,16 @@ logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self) -> None:
self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int)
self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)
self._messages_at_backtrack = {
self._messages_at_reject_count = {
1: (
"pip is looking at multiple versions of {package_name} to "
"determine which version is compatible with other "
"requirements. This could take a while."
),
8: (
"pip is looking at multiple versions of {package_name} to "
"pip is still looking at multiple versions of {package_name} to "
"determine which version is compatible with other "
"requirements. This could take a while."
),
@ -32,16 +32,28 @@ class PipReporter(BaseReporter):
),
}
def backtracking(self, candidate: Candidate) -> None:
self.backtracks_by_package[candidate.name] += 1
def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
self.reject_count_by_package[candidate.name] += 1
count = self.backtracks_by_package[candidate.name]
if count not in self._messages_at_backtrack:
count = self.reject_count_by_package[candidate.name]
if count not in self._messages_at_reject_count:
return
message = self._messages_at_backtrack[count]
message = self._messages_at_reject_count[count]
logger.info("INFO: %s", message.format(package_name=candidate.name))
msg = "Will try a different candidate, due to conflict:"
for req_info in criterion.information:
req, parent = req_info.requirement, req_info.parent
# Inspired by Factory.get_installation_error
msg += "\n "
if parent:
msg += f"{parent.name} {parent.version} depends on "
else:
msg += "The user requested "
msg += req.format_for_error()
logger.debug(msg)
class PipDebuggingReporter(BaseReporter):
"""A reporter that does an info log for every event it sees."""
@ -61,8 +73,8 @@ class PipDebuggingReporter(BaseReporter):
def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)
def backtracking(self, candidate: Candidate) -> None:
logger.info("Reporter.backtracking(%r)", candidate)
def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate)
def pinning(self, candidate: Candidate) -> None:
logger.info("Reporter.pinning(%r)", candidate)

@ -1,6 +1,7 @@
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._internal.req.constructors import install_req_drop_extras
from pip._internal.req.req_install import InstallRequirement
from .base import Candidate, CandidateLookup, Requirement, format_name
@ -43,7 +44,7 @@ class SpecifierRequirement(Requirement):
def __init__(self, ireq: InstallRequirement) -> None:
assert ireq.link is None, "This is a link, not a specifier"
self._ireq = ireq
self._extras = frozenset(ireq.extras)
self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
def __str__(self) -> str:
return str(self._ireq.req)
@ -64,7 +65,6 @@ class SpecifierRequirement(Requirement):
return format_name(self.project_name, self._extras)
def format_for_error(self) -> str:
# Convert comma-separated specifiers into "A, B, ..., F and G"
# This makes the specifier a bit more "human readable", without
# risking a change in meaning. (Hopefully! Not all edge cases have
@ -93,6 +93,18 @@ class SpecifierRequirement(Requirement):
return spec.contains(candidate.version, prereleases=True)
class SpecifierWithoutExtrasRequirement(SpecifierRequirement):
"""
Requirement backed by an install requirement on a base package.
Trims extras from its install requirement if there are any.
"""
def __init__(self, ireq: InstallRequirement) -> None:
assert ireq.link is None, "This is a link, not a specifier"
self._ireq = install_req_drop_extras(ireq)
self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
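install_req_drop_extras is internal to pip; the effect it has on the underlying PEP 508 requirement can be sketched with packaging alone. Mutating the parsed Requirement here is purely illustrative:

from packaging.requirements import Requirement

req = Requirement("pkg[extra1,extra2]>=1.0")
req.extras = set()  # illustration: keep name and specifier, drop extras
print(str(req))     # pkg>=1.0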
class RequiresPythonRequirement(Requirement):
"""A requirement representing Requires-Python metadata."""

@ -1,3 +1,4 @@
import contextlib
import functools
import logging
import os
@ -11,6 +12,7 @@ from pip._vendor.resolvelib.structs import DirectedGraph
from pip._internal.cache import WheelCache
from pip._internal.index.package_finder import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import install_req_extend_extras
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
@ -19,6 +21,7 @@ from pip._internal.resolution.resolvelib.reporter import (
PipDebuggingReporter,
PipReporter,
)
from pip._internal.utils.packaging import get_requirement
from .base import Candidate, Requirement
from .factory import Factory
@ -88,9 +91,9 @@ class Resolver(BaseResolver):
)
try:
try_to_avoid_resolution_too_deep = 2000000
limit_how_complex_resolution_can_be = 200000
result = self._result = resolver.resolve(
collected.requirements, max_rounds=try_to_avoid_resolution_too_deep
collected.requirements, max_rounds=limit_how_complex_resolution_can_be
)
except ResolutionImpossible as e:
@ -101,9 +104,24 @@ class Resolver(BaseResolver):
raise error from e
req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
for candidate in result.mapping.values():
# process candidates with extras last to ensure their base equivalent is
# already in the req_set if appropriate.
# Python's sort is stable so using a binary key function keeps relative order
# within both subsets.
for candidate in sorted(
result.mapping.values(), key=lambda c: c.name != c.project_name
):
ireq = candidate.get_install_requirement()
if ireq is None:
if candidate.name != candidate.project_name:
# extend existing req's extras
with contextlib.suppress(KeyError):
req = req_set.get_requirement(candidate.project_name)
req_set.add_named_requirement(
install_req_extend_extras(
req, get_requirement(candidate.name).extras
)
)
continue
# Check if there is already an installation under the same name,
@ -159,6 +177,9 @@ class Resolver(BaseResolver):
reqs = req_set.all_requirements
self.factory.preparer.prepare_linked_requirements_more(reqs)
for req in reqs:
req.prepared = True
req.needs_more_preparation = False
return req_set
def get_installation_order(

@ -28,8 +28,7 @@ from pip._internal.utils.entrypoints import (
from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
from pip._internal.utils.misc import ensure_dir
_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
_WEEK = datetime.timedelta(days=7)
logger = logging.getLogger(__name__)
@ -73,12 +72,10 @@ class SelfCheckState:
if "pypi_version" not in self._state:
return None
seven_days_in_seconds = 7 * 24 * 60 * 60
# Determine if we need to refresh the state
last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT)
seconds_since_last_check = (current_time - last_check).total_seconds()
if seconds_since_last_check > seven_days_in_seconds:
last_check = datetime.datetime.fromisoformat(self._state["last_check"])
time_since_last_check = current_time - last_check
if time_since_last_check > _WEEK:
return None
return self._state["pypi_version"]
@ -100,7 +97,7 @@ class SelfCheckState:
# Include the key so it's easy to tell which pip wrote the
# file.
"key": self.key,
"last_check": current_time.strftime(_DATE_FMT),
"last_check": current_time.isoformat(),
"pypi_version": pypi_version,
}
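The switch from a hand-rolled strftime format to isoformat() keeps the timestamp round-trippable, including its UTC offset, and the timedelta comparison replaces manual seconds arithmetic. A minimal sketch:

import datetime

_WEEK = datetime.timedelta(days=7)

now = datetime.datetime.now(datetime.timezone.utc)
stamp = now.isoformat()  # e.g. '2024-01-01T00:00:00+00:00'
last_check = datetime.datetime.fromisoformat(stamp)
print(now - last_check > _WEEK)  # False right after writing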
@ -229,14 +226,14 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non
try:
upgrade_prompt = _self_version_check_logic(
state=SelfCheckState(cache_dir=options.cache_dir),
current_time=datetime.datetime.utcnow(),
current_time=datetime.datetime.now(datetime.timezone.utc),
local_version=installed_dist.version,
get_remote_version=functools.partial(
_get_current_remote_pip_version, session, options
),
)
if upgrade_prompt is not None:
logger.warning("[present-rich] %s", upgrade_prompt)
logger.warning("%s", upgrade_prompt, extra={"rich": True})
except Exception:
logger.warning("There was an error checking the latest version of pip.")
logger.debug("See below for error", exc_info=True)

@ -118,71 +118,3 @@ def deprecated(
raise PipDeprecationWarning(message)
warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
class LegacyInstallReason:
def __init__(
self,
reason: str,
replacement: Optional[str] = None,
gone_in: Optional[str] = None,
feature_flag: Optional[str] = None,
issue: Optional[int] = None,
emit_after_success: bool = False,
emit_before_install: bool = False,
):
self._reason = reason
self._replacement = replacement
self._gone_in = gone_in
self._feature_flag = feature_flag
self._issue = issue
self.emit_after_success = emit_after_success
self.emit_before_install = emit_before_install
def emit_deprecation(self, name: str) -> None:
deprecated(
reason=self._reason.format(name=name),
replacement=self._replacement,
gone_in=self._gone_in,
feature_flag=self._feature_flag,
issue=self._issue,
)
LegacyInstallReasonFailedBdistWheel = LegacyInstallReason(
reason=(
"{name} was installed using the legacy 'setup.py install' "
"method, because a wheel could not be built for it."
),
replacement="to fix the wheel build issue reported above",
gone_in="23.1",
issue=8368,
emit_after_success=True,
)
LegacyInstallReasonMissingWheelPackage = LegacyInstallReason(
reason=(
"{name} is being installed using the legacy "
"'setup.py install' method, because it does not have a "
"'pyproject.toml' and the 'wheel' package "
"is not installed."
),
replacement="to enable the '--use-pep517' option",
gone_in="23.1",
issue=8559,
emit_before_install=True,
)
LegacyInstallReasonNoBinaryForcesSetuptoolsInstall = LegacyInstallReason(
reason=(
"{name} is being installed using the legacy "
"'setup.py install' method, because the '--no-binary' option was enabled "
"for it and this currently disables local wheel building for projects that "
"don't have a 'pyproject.toml' file."
),
replacement="to enable the '--use-pep517' option",
gone_in="23.1",
issue=11451,
emit_before_install=True,
)

@ -1,43 +0,0 @@
from getopt import GetoptError, getopt
from typing import Dict, List
_options = [
"exec-prefix=",
"home=",
"install-base=",
"install-data=",
"install-headers=",
"install-lib=",
"install-platlib=",
"install-purelib=",
"install-scripts=",
"prefix=",
"root=",
"user",
]
def parse_distutils_args(args: List[str]) -> Dict[str, str]:
"""Parse provided arguments, returning an object that has the matched arguments.
Any unknown arguments are ignored.
"""
result = {}
for arg in args:
try:
parsed_opt, _ = getopt(args=[arg], shortopts="", longopts=_options)
except GetoptError:
# We don't care about any other options, which here may be
# considered unrecognized since our option list is not
# exhaustive.
continue
if not parsed_opt:
continue
option = parsed_opt[0]
name_from_parsed = option[0][2:].replace("-", "_")
value_from_parsed = option[1] or "true"
result[name_from_parsed] = value_from_parsed
return result

@ -1,6 +1,3 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import os
import sys
from typing import Optional, Tuple
@ -20,8 +17,11 @@ def glibc_version_string_confstr() -> Optional[str]:
if sys.platform == "win32":
return None
try:
gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
if gnu_libc_version is None:
return None
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
_, version = os.confstr("CS_GNU_LIBC_VERSION").split()
_, version = gnu_libc_version.split()
except (AttributeError, OSError, ValueError):
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
return None

@ -105,6 +105,13 @@ class Hashes:
with open(path, "rb") as file:
return self.check_against_file(file)
def has_one_of(self, hashes: Dict[str, str]) -> bool:
"""Return whether any of the given hashes are allowed."""
for hash_name, hex_digest in hashes.items():
if self.is_hash_allowed(hash_name, hex_digest):
return True
return False
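A standalone sketch of the has_one_of check against a plain allowed-hash mapping, with hashlib used only to fabricate a digest for the example:

import hashlib

allowed = {"sha256": [hashlib.sha256(b"payload").hexdigest()]}

def has_one_of(candidates: dict) -> bool:
    # True if any (name, digest) pair appears in the allowed mapping.
    return any(d in allowed.get(name, []) for name, d in candidates.items())

print(has_one_of({"sha256": hashlib.sha256(b"payload").hexdigest()}))  # True
print(has_one_of({"md5": "0" * 32}))                                   # False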
def __bool__(self) -> bool:
"""Return whether I know any known-good hashes."""
return bool(self._allowed)

@ -1,35 +0,0 @@
"""A helper module that injects SecureTransport, on import.
The import should be done as early as possible, to ensure all requests and
sessions (or whatever) are created after injecting SecureTransport.
Note that we only do the injection on macOS, when the linked OpenSSL is too
old to handle TLSv1.2.
"""
import sys
def inject_securetransport() -> None:
# Only relevant on macOS
if sys.platform != "darwin":
return
try:
import ssl
except ImportError:
return
# Checks for OpenSSL 1.0.1
if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F:
return
try:
from pip._vendor.urllib3.contrib import securetransport
except (ImportError, OSError):
return
securetransport.inject_into_urllib3()
inject_securetransport()

@ -155,8 +155,8 @@ class RichPipStreamHandler(RichHandler):
# If we are given a diagnostic error to present, present it with indentation.
assert isinstance(record.args, tuple)
if record.msg == "[present-rich] %s" and len(record.args) == 1:
rich_renderable = record.args[0]
if getattr(record, "rich", False):
(rich_renderable,) = record.args
assert isinstance(
rich_renderable, (ConsoleRenderable, RichCast, str)
), f"{rich_renderable} is not rich-console-renderable"

@ -1,6 +1,3 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import contextlib
import errno
import getpass
@ -14,9 +11,11 @@ import stat
import sys
import sysconfig
import urllib.parse
from functools import partial
from io import StringIO
from itertools import filterfalse, tee, zip_longest
from types import TracebackType
from pathlib import Path
from types import FunctionType, TracebackType
from typing import (
Any,
BinaryIO,
@ -32,9 +31,11 @@ from typing import (
Tuple,
Type,
TypeVar,
Union,
cast,
)
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
@ -68,6 +69,8 @@ T = TypeVar("T")
ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
VersionInfo = Tuple[int, int, int]
NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
OnExc = Callable[[FunctionType, Path, BaseException], Any]
OnErr = Callable[[FunctionType, Path, ExcInfo], Any]
def get_pip_version() -> str:
@ -125,28 +128,75 @@ def get_prog() -> str:
# Retry every half second for up to 3 seconds
# Tenacity raises RetryError by default, explicitly raise the original exception
@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
def rmtree(dir: str, ignore_errors: bool = False) -> None:
shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)
def rmtree(
dir: str,
ignore_errors: bool = False,
onexc: Optional[OnExc] = None,
) -> None:
if ignore_errors:
onexc = _onerror_ignore
if onexc is None:
onexc = _onerror_reraise
handler: OnErr = partial(
# `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to
# `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`.
cast(Union[OnExc, OnErr], rmtree_errorhandler),
onexc=onexc,
)
if sys.version_info >= (3, 12):
# See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
shutil.rmtree(dir, onexc=handler)
else:
shutil.rmtree(dir, onerror=handler)
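The @retry decorator retained above comes from pip's vendored tenacity; its behaviour can be shown with the standalone tenacity distribution. An attempt-based stop is used here purely to keep the example fast:

import tenacity

attempts = {"n": 0}

@tenacity.retry(reraise=True, stop=tenacity.stop_after_attempt(3),
                wait=tenacity.wait_fixed(0.1))
def flaky() -> int:
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise OSError("transient failure")  # retried twice, then succeeds
    return attempts["n"]

print(flaky())  # 3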
def _onerror_ignore(*_args: Any) -> None:
pass
def _onerror_reraise(*_args: Any) -> None:
raise
def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None:
"""On Windows, the files in .svn are read-only, so when rmtree() tries to
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
def rmtree_errorhandler(
func: FunctionType,
path: Path,
exc_info: Union[ExcInfo, BaseException],
*,
onexc: OnExc = _onerror_reraise,
) -> None:
"""
`rmtree` error handler to 'force' a file remove (i.e. like `rm -f`).
* If a file is readonly then its write flag is set and the operation is
retried.
* `onexc` is the original callback from `rmtree(... onexc=onexc)`
that is chained at the end if the "rm -f" still fails.
"""
try:
has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE)
st_mode = os.stat(path).st_mode
except OSError:
# it's equivalent to os.path.exists
return
if has_attr_readonly:
if not st_mode & stat.S_IWRITE:
# convert to read/write
os.chmod(path, stat.S_IWRITE)
# use the original function to repeat the operation
func(path)
return
else:
raise
try:
os.chmod(path, st_mode | stat.S_IWRITE)
except OSError:
pass
else:
# use the original function to repeat the operation
try:
func(path)
return
except OSError:
pass
if not isinstance(exc_info, BaseException):
_, exc_info, _ = exc_info
onexc(func, path, exc_info)
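
Background for the branch above: Python 3.12 deprecated shutil.rmtree's onerror callback (called with an exc_info triple) in favor of onexc (called with the exception instance). A minimal standalone sketch of the same dispatch, independent of pip's helpers:

import shutil
import sys


def remove_tree(path: str) -> None:
    def handler(func, path, exc):
        # Python 3.12+ passes the exception itself (onexc); older versions
        # pass a (type, value, traceback) triple (onerror). Normalize both.
        if isinstance(exc, tuple):
            exc = exc[1]
        print(f"{func.__name__} failed on {path}: {exc}")

    if sys.version_info >= (3, 12):
        shutil.rmtree(path, onexc=handler)
    else:
        shutil.rmtree(path, onerror=handler)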
def display_path(path: str) -> str:
@@ -338,17 +388,18 @@ def write_output(msg: Any, *args: Any) -> None:
class StreamWrapper(StringIO):
orig_stream: TextIO = None
orig_stream: TextIO
@classmethod
def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
cls.orig_stream = orig_stream
return cls()
ret = cls()
ret.orig_stream = orig_stream
return ret
# compileall.compile_dir() needs stdout.encoding to print to stdout
# https://github.com/python/mypy/issues/4125
# type ignore is because TextIOBase.encoding is writeable
@property
def encoding(self): # type: ignore
def encoding(self) -> str: # type: ignore
return self.orig_stream.encoding
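
The from_stream fix above matters because the old code bound orig_stream on the class, so constructing a second wrapper rebound the stream for every existing instance. A toy illustration of the corrected per-instance behavior:

import sys
from io import StringIO


class Wrapper(StringIO):
    orig_stream: object  # annotation only; assigned per instance below

    @classmethod
    def from_stream(cls, orig_stream):
        ret = cls()
        ret.orig_stream = orig_stream  # instance attribute, not cls.orig_stream
        return ret


a = Wrapper.from_stream(sys.stdout)
b = Wrapper.from_stream(sys.stderr)
assert a.orig_stream is sys.stdout  # still holds after creating b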
@@ -416,7 +467,7 @@ def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
return f"{scheme}://{netloc}"
def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]:
def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]:
"""
Return the host-port pair from a netloc.
"""
@@ -504,7 +555,9 @@ def _redact_netloc(netloc: str) -> Tuple[str]:
return (redact_netloc(netloc),)
def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]:
def split_auth_netloc_from_url(
url: str,
) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]:
"""
Parse a url into separate netloc, auth, and url with no auth.
@@ -526,6 +579,13 @@ def redact_auth_from_url(url: str) -> str:
return _transform_url(url, _redact_netloc)[0]
def redact_auth_from_requirement(req: Requirement) -> str:
"""Replace the password in a given requirement url with ****."""
if not req.url:
return str(req)
return str(req).replace(req.url, redact_auth_from_url(req.url))
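
redact_auth_from_requirement delegates to redact_auth_from_url, which masks only the password part of the netloc. A rough standalone illustration of that behavior (a sketch, not pip's actual implementation):

from urllib.parse import urlsplit, urlunsplit


def redact(url: str) -> str:
    parts = urlsplit(url)
    if parts.password is None:
        return url
    host = parts.hostname or ""
    if parts.port:
        host = f"{host}:{parts.port}"
    netloc = f"{parts.username}:****@{host}"
    return urlunsplit((parts.scheme, netloc, parts.path, parts.query, parts.fragment))


print(redact("https://user:s3cret@example.com/simple/"))
# -> https://user:****@example.com/simple/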
class HiddenText:
def __init__(self, secret: str, redacted: str) -> None:
self.secret = secret
@@ -614,18 +674,6 @@ def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
return h, length
def is_wheel_installed() -> bool:
"""
Return whether the wheel package is installed.
"""
try:
import wheel # noqa: F401
except ImportError:
return False
return True
def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
"""
Return paired elements.
@@ -669,7 +717,7 @@ class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
def build_wheel(
self,
wheel_directory: str,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
metadata_directory: Optional[str] = None,
) -> str:
cs = self.config_holder.config_settings
@@ -678,7 +726,9 @@ class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
)
def build_sdist(
self, sdist_directory: str, config_settings: Optional[Dict[str, str]] = None
self,
sdist_directory: str,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> str:
cs = self.config_holder.config_settings
return super().build_sdist(sdist_directory, config_settings=cs)
@@ -686,7 +736,7 @@ class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
def build_editable(
self,
wheel_directory: str,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
metadata_directory: Optional[str] = None,
) -> str:
cs = self.config_holder.config_settings
@@ -695,19 +745,19 @@ class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
)
def get_requires_for_build_wheel(
self, config_settings: Optional[Dict[str, str]] = None
self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
) -> List[str]:
cs = self.config_holder.config_settings
return super().get_requires_for_build_wheel(config_settings=cs)
def get_requires_for_build_sdist(
self, config_settings: Optional[Dict[str, str]] = None
self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
) -> List[str]:
cs = self.config_holder.config_settings
return super().get_requires_for_build_sdist(config_settings=cs)
def get_requires_for_build_editable(
self, config_settings: Optional[Dict[str, str]] = None
self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
) -> List[str]:
cs = self.config_holder.config_settings
return super().get_requires_for_build_editable(config_settings=cs)
@@ -715,7 +765,7 @@ class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
def prepare_metadata_for_build_wheel(
self,
metadata_directory: str,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
_allow_fallback: bool = True,
) -> str:
cs = self.config_holder.config_settings
@@ -728,7 +778,7 @@ class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
def prepare_metadata_for_build_editable(
self,
metadata_directory: str,
config_settings: Optional[Dict[str, str]] = None,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
_allow_fallback: bool = True,
) -> str:
cs = self.config_holder.config_settings
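
Every signature change in this class widens config_settings from Dict[str, str] to Dict[str, Union[str, List[str]]]: when the same --config-settings key is passed more than once on the pip command line, the values are collected into a list before reaching the PEP 517 backend. A hedged sketch of the resulting shape (the key names are made up):

from typing import Dict, List, Union

ConfigSettings = Dict[str, Union[str, List[str]]]

# e.g. --config-settings editable-mode=strict
#      --config-settings "--build-option=--foo"
#      --config-settings "--build-option=--bar"
settings: ConfigSettings = {
    "editable-mode": "strict",
    "--build-option": ["--foo", "--bar"],  # repeated key collapses to a list
}


def values(cs: ConfigSettings, key: str) -> List[str]:
    v = cs.get(key, [])
    return [v] if isinstance(v, str) else list(v)


assert values(settings, "--build-option") == ["--foo", "--bar"]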

@@ -103,8 +103,8 @@ def make_setuptools_clean_args(
def make_setuptools_develop_args(
setup_py_path: str,
*,
global_options: Sequence[str],
install_options: Sequence[str],
no_user_config: bool,
prefix: Optional[str],
home: Optional[str],
@@ -120,8 +120,6 @@ def make_setuptools_develop_args(
args += ["develop", "--no-deps"]
args += install_options
if prefix:
args += ["--prefix", prefix]
if home is not None:
@@ -146,50 +144,3 @@ def make_setuptools_egg_info_args(
args += ["--egg-base", egg_info_dir]
return args
def make_setuptools_install_args(
setup_py_path: str,
global_options: Sequence[str],
install_options: Sequence[str],
record_filename: str,
root: Optional[str],
prefix: Optional[str],
header_dir: Optional[str],
home: Optional[str],
use_user_site: bool,
no_user_config: bool,
pycompile: bool,
) -> List[str]:
assert not (use_user_site and prefix)
assert not (use_user_site and root)
args = make_setuptools_shim_args(
setup_py_path,
global_options=global_options,
no_user_config=no_user_config,
unbuffered_output=True,
)
args += ["install", "--record", record_filename]
args += ["--single-version-externally-managed"]
if root is not None:
args += ["--root", root]
if prefix is not None:
args += ["--prefix", prefix]
if home is not None:
args += ["--home", home]
if use_user_site:
args += ["--user", "--prefix="]
if pycompile:
args += ["--compile"]
else:
args += ["--no-compile"]
if header_dir:
args += ["--install-headers", header_dir]
args += install_options
return args

@@ -209,7 +209,7 @@ def call_subprocess(
output_lines=all_output if not showing_subprocess else None,
)
if log_failed_cmd:
subprocess_logger.error("[present-rich] %s", error)
subprocess_logger.error("%s", error, extra={"rich": True})
subprocess_logger.verbose(
"[bold magenta]full command[/]: [blue]%s[/]",
escape(format_command_args(cmd)),
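
This replaces the in-band "[present-rich]" marker with a structured extra attribute; the getattr(record, "rich", False) check at the top of this section is the consuming side. A minimal sketch of how extra surfaces as a LogRecord attribute:

import logging


class RichAwareHandler(logging.Handler):
    def emit(self, record: logging.LogRecord) -> None:
        # extra={"rich": True} becomes a plain attribute on the record
        if getattr(record, "rich", False):
            print("render with rich:", record.getMessage())
        else:
            print("plain:", record.getMessage())


log = logging.getLogger("demo")
log.addHandler(RichAwareHandler())
log.error("%s", "command failed", extra={"rich": True})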

@@ -3,8 +3,19 @@ import itertools
import logging
import os.path
import tempfile
import traceback
from contextlib import ExitStack, contextmanager
from typing import Any, Dict, Generator, Optional, TypeVar, Union
from pathlib import Path
from typing import (
Any,
Callable,
Dict,
Generator,
List,
Optional,
TypeVar,
Union,
)
from pip._internal.utils.misc import enum, rmtree
@@ -106,6 +117,7 @@ class TempDirectory:
delete: Union[bool, None, _Default] = _default,
kind: str = "temp",
globally_managed: bool = False,
ignore_cleanup_errors: bool = True,
):
super().__init__()
@@ -128,6 +140,7 @@ class TempDirectory:
self._deleted = False
self.delete = delete
self.kind = kind
self.ignore_cleanup_errors = ignore_cleanup_errors
if globally_managed:
assert _tempdir_manager is not None
@@ -170,7 +183,44 @@ class TempDirectory:
self._deleted = True
if not os.path.exists(self._path):
return
rmtree(self._path)
errors: List[BaseException] = []
def onerror(
func: Callable[..., Any],
path: Path,
exc_val: BaseException,
) -> None:
"""Log a warning for a `rmtree` error and continue"""
formatted_exc = "\n".join(
traceback.format_exception_only(type(exc_val), exc_val)
)
formatted_exc = formatted_exc.rstrip() # remove trailing new line
if func in (os.unlink, os.remove, os.rmdir):
logger.debug(
"Failed to remove a temporary file '%s' due to %s.\n",
path,
formatted_exc,
)
else:
logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)
errors.append(exc_val)
if self.ignore_cleanup_errors:
try:
# first try with tenacity; retrying to handle ephemeral errors
rmtree(self._path, ignore_errors=False)
except OSError:
# last pass ignore/log all errors
rmtree(self._path, onexc=onerror)
if errors:
logger.warning(
"Failed to remove contents in a temporary directory '%s'.\n"
"You can safely remove it manually.",
self._path,
)
else:
rmtree(self._path)
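
The cleanup above is deliberately two-phase: a strict rmtree first (with tenacity retrying transient failures), then a permissive pass that logs every remaining error through onexc and warns once at the end. A simplified stdlib-only sketch of the same pattern (collapsing the per-file error collection):

import logging
import shutil

logger = logging.getLogger(__name__)


def best_effort_rmtree(path: str) -> None:
    try:
        shutil.rmtree(path)  # strict pass: raise on the first failure
    except OSError:
        shutil.rmtree(path, ignore_errors=True)  # permissive pass
        logger.warning(
            "Failed to remove contents in '%s'. You can safely remove it manually.",
            path,
        )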
class AdjacentTempDirectory(TempDirectory):

@@ -101,7 +101,7 @@ class Git(VersionControl):
if not match:
logger.warning("Can't parse git version: %s", version)
return ()
return tuple(int(c) for c in match.groups())
return (int(match.group(1)), int(match.group(2)))
@classmethod
def get_current_branch(cls, location: str) -> Optional[str]:
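
The version parser above now returns just (major, minor) instead of converting every regex group. A hedged re-creation, assuming a two-group pattern similar to pip's:

import re
from typing import Tuple


def parse_git_version(version: str) -> Tuple[int, ...]:
    # e.g. "git version 2.39.2" or "git version 2.39.2.windows.1"
    match = re.match(r"git version (\d+)\.(\d+)", version)
    if not match:
        return ()
    return (int(match.group(1)), int(match.group(2)))


assert parse_git_version("git version 2.39.2.windows.1") == (2, 39)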

@@ -31,7 +31,7 @@ class Mercurial(VersionControl):
@staticmethod
def get_base_rev_args(rev: str) -> List[str]:
return [rev]
return [f"-r={rev}"]
def fetch_new(
self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int

@@ -5,7 +5,7 @@ import logging
import os.path
import re
import shutil
from typing import Callable, Iterable, List, Optional, Tuple
from typing import Iterable, List, Optional, Tuple
from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version
from pip._vendor.packaging.version import InvalidVersion, Version
@@ -19,12 +19,8 @@ from pip._internal.operations.build.wheel import build_wheel_pep517
from pip._internal.operations.build.wheel_editable import build_wheel_editable
from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.deprecation import (
LegacyInstallReasonMissingWheelPackage,
LegacyInstallReasonNoBinaryForcesSetuptoolsInstall,
)
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed
from pip._internal.utils.misc import ensure_dir, hash_file
from pip._internal.utils.setuptools_build import make_setuptools_clean_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
@@ -35,7 +31,6 @@ logger = logging.getLogger(__name__)
_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)
BdistWheelAllowedPredicate = Callable[[InstallRequirement], bool]
BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
@@ -50,7 +45,6 @@ def _contains_egg_info(s: str) -> bool:
def _should_build(
req: InstallRequirement,
need_wheel: bool,
check_bdist_wheel: Optional[BdistWheelAllowedPredicate] = None,
) -> bool:
"""Return whether an InstallRequirement should be built into a wheel."""
if req.constraint:
@@ -78,24 +72,6 @@
# we only build PEP 660 editable requirements
return req.supports_pyproject_editable()
if req.use_pep517:
return True
assert check_bdist_wheel is not None
if not check_bdist_wheel(req):
# /!\ When we change this to unconditionally return True, we must also remove
# support for `--install-option`. Indeed, `--install-option` implies
# `--no-binary` so we can return False here and run `setup.py install`.
# `--global-option` and `--build-option` can remain until we drop support for
# building with `setup.py bdist_wheel`.
req.legacy_install_reason = LegacyInstallReasonNoBinaryForcesSetuptoolsInstall
return False
if not is_wheel_installed():
# we don't build legacy requirements if wheel is not installed
req.legacy_install_reason = LegacyInstallReasonMissingWheelPackage
return False
return True
@@ -107,11 +83,8 @@ def should_build_for_wheel_command(
def should_build_for_install_command(
req: InstallRequirement,
check_bdist_wheel_allowed: BdistWheelAllowedPredicate,
) -> bool:
return _should_build(
req, need_wheel=False, check_bdist_wheel=check_bdist_wheel_allowed
)
return _should_build(req, need_wheel=False)
def _should_cache(

@@ -117,4 +117,5 @@ if DEBUNDLED:
vendored("rich.traceback")
vendored("tenacity")
vendored("tomli")
vendored("truststore")
vendored("urllib3")

@@ -8,11 +8,21 @@ Make it easy to import from cachecontrol without long namespaces.
"""
__author__ = "Eric Larson"
__email__ = "eric@ionrock.org"
__version__ = "0.12.11"
__version__ = "0.13.1"
from .wrapper import CacheControl
from .adapter import CacheControlAdapter
from .controller import CacheController
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.controller import CacheController
from pip._vendor.cachecontrol.wrapper import CacheControl
__all__ = [
"__author__",
"__email__",
"__version__",
"CacheControlAdapter",
"CacheController",
"CacheControl",
]
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())

@@ -1,8 +1,11 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import logging
from argparse import ArgumentParser
from typing import TYPE_CHECKING
from pip._vendor import requests
@@ -10,16 +13,19 @@ from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import logger
from argparse import ArgumentParser
if TYPE_CHECKING:
from argparse import Namespace
from pip._vendor.cachecontrol.controller import CacheController
def setup_logging():
def setup_logging() -> None:
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
logger.addHandler(handler)
def get_session():
def get_session() -> requests.Session:
adapter = CacheControlAdapter(
DictCache(), cache_etags=True, serializer=None, heuristic=None
)
@@ -27,17 +33,17 @@ def get_session():
sess.mount("http://", adapter)
sess.mount("https://", adapter)
sess.cache_controller = adapter.controller
sess.cache_controller = adapter.controller # type: ignore[attr-defined]
return sess
def get_args():
def get_args() -> Namespace:
parser = ArgumentParser()
parser.add_argument("url", help="The URL to try and cache")
return parser.parse_args()
def main(args=None):
def main() -> None:
args = get_args()
sess = get_session()
@@ -48,10 +54,13 @@ def main(args=None):
setup_logging()
# try setting the cache
sess.cache_controller.cache_response(resp.request, resp.raw)
cache_controller: CacheController = (
sess.cache_controller # type: ignore[attr-defined]
)
cache_controller.cache_response(resp.request, resp.raw)
# Now try to get it
if sess.cache_controller.cached_request(resp.request):
if cache_controller.cached_request(resp.request):
print("Cached!")
else:
print("Not cached :(")

@@ -1,16 +1,26 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import types
import functools
import types
import zlib
from typing import TYPE_CHECKING, Any, Collection, Mapping
from pip._vendor.requests.adapters import HTTPAdapter
from .controller import CacheController, PERMANENT_REDIRECT_STATUSES
from .cache import DictCache
from .filewrapper import CallbackFileWrapper
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController
from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper
if TYPE_CHECKING:
from pip._vendor.requests import PreparedRequest, Response
from pip._vendor.urllib3 import HTTPResponse
from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.heuristics import BaseHeuristic
from pip._vendor.cachecontrol.serialize import Serializer
class CacheControlAdapter(HTTPAdapter):
@@ -18,16 +28,16 @@ class CacheControlAdapter(HTTPAdapter):
def __init__(
self,
cache=None,
cache_etags=True,
controller_class=None,
serializer=None,
heuristic=None,
cacheable_methods=None,
*args,
**kw
):
super(CacheControlAdapter, self).__init__(*args, **kw)
cache: BaseCache | None = None,
cache_etags: bool = True,
controller_class: type[CacheController] | None = None,
serializer: Serializer | None = None,
heuristic: BaseHeuristic | None = None,
cacheable_methods: Collection[str] | None = None,
*args: Any,
**kw: Any,
) -> None:
super().__init__(*args, **kw)
self.cache = DictCache() if cache is None else cache
self.heuristic = heuristic
self.cacheable_methods = cacheable_methods or ("GET",)
@@ -37,7 +47,16 @@ class CacheControlAdapter(HTTPAdapter):
self.cache, cache_etags=cache_etags, serializer=serializer
)
def send(self, request, cacheable_methods=None, **kw):
def send(
self,
request: PreparedRequest,
stream: bool = False,
timeout: None | float | tuple[float, float] | tuple[float, None] = None,
verify: bool | str = True,
cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None,
proxies: Mapping[str, str] | None = None,
cacheable_methods: Collection[str] | None = None,
) -> Response:
"""
Send a request. Use the request information to see if it
exists in the cache and cache the response if we need to and can.
@@ -54,13 +73,17 @@ class CacheControlAdapter(HTTPAdapter):
# check for etags and add headers if appropriate
request.headers.update(self.controller.conditional_headers(request))
resp = super(CacheControlAdapter, self).send(request, **kw)
resp = super().send(request, stream, timeout, verify, cert, proxies)
return resp
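
send() now spells out HTTPAdapter.send's full parameter list instead of accepting **kw, and forwards each argument positionally to super(). A hedged sketch of the cache-first dispatch shape (lookup() is a made-up stand-in for the CacheController consultation):

from __future__ import annotations

from typing import Collection

from requests.adapters import HTTPAdapter


class CacheFirstAdapter(HTTPAdapter):
    cacheable = ("GET",)

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None,
             cacheable_methods: Collection[str] | None = None):
        if request.method in (cacheable_methods or self.cacheable):
            cached = self.lookup(request)  # hypothetical cache probe
            if cached is not None:
                return cached
        return super().send(request, stream, timeout, verify, cert, proxies)

    def lookup(self, request):
        return None  # placeholder: the real adapter asks its controller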
def build_response(
self, request, response, from_cache=False, cacheable_methods=None
):
self,
request: PreparedRequest,
response: HTTPResponse,
from_cache: bool = False,
cacheable_methods: Collection[str] | None = None,
) -> Response:
"""
Build a response by making a request or using the cache.
@@ -102,36 +125,37 @@ class CacheControlAdapter(HTTPAdapter):
else:
# Wrap the response file with a wrapper that will cache the
# response when the stream has been consumed.
response._fp = CallbackFileWrapper(
response._fp,
response._fp = CallbackFileWrapper( # type: ignore[attr-defined]
response._fp, # type: ignore[attr-defined]
functools.partial(
self.controller.cache_response, request, response
),
)
if response.chunked:
super_update_chunk_length = response._update_chunk_length
super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined]
def _update_chunk_length(self):
def _update_chunk_length(self: HTTPResponse) -> None:
super_update_chunk_length()
if self.chunk_left == 0:
self._fp._close()
self._fp._close() # type: ignore[attr-defined]
response._update_chunk_length = types.MethodType(
response._update_chunk_length = types.MethodType( # type: ignore[attr-defined]
_update_chunk_length, response
)
resp = super(CacheControlAdapter, self).build_response(request, response)
resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call]
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:
assert request.url is not None
cache_url = self.controller.cache_url(request.url)
self.cache.delete(cache_url)
# Give the request a from_cache attr to let people use it
resp.from_cache = from_cache
resp.from_cache = from_cache # type: ignore[attr-defined]
return resp
def close(self):
def close(self) -> None:
self.cache.close()
super(CacheControlAdapter, self).close()
super().close() # type: ignore[no-untyped-call]

@@ -6,38 +6,46 @@
The cache object API for implementing caches. The default is a thread
safe in-memory dictionary.
"""
from __future__ import annotations
from threading import Lock
from typing import IO, TYPE_CHECKING, MutableMapping
if TYPE_CHECKING:
from datetime import datetime
class BaseCache(object):
def get(self, key):
class BaseCache:
def get(self, key: str) -> bytes | None:
raise NotImplementedError()
def set(self, key, value, expires=None):
def set(
self, key: str, value: bytes, expires: int | datetime | None = None
) -> None:
raise NotImplementedError()
def delete(self, key):
def delete(self, key: str) -> None:
raise NotImplementedError()
def close(self):
def close(self) -> None:
pass
class DictCache(BaseCache):
def __init__(self, init_dict=None):
def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None:
self.lock = Lock()
self.data = init_dict or {}
def get(self, key):
def get(self, key: str) -> bytes | None:
return self.data.get(key, None)
def set(self, key, value, expires=None):
def set(
self, key: str, value: bytes, expires: int | datetime | None = None
) -> None:
with self.lock:
self.data.update({key: value})
def delete(self, key):
def delete(self, key: str) -> None:
with self.lock:
if key in self.data:
self.data.pop(key)
@@ -55,10 +63,11 @@ class SeparateBodyBaseCache(BaseCache):
Similarly, the body should be loaded separately via ``get_body()``.
"""
def set_body(self, key, body):
def set_body(self, key: str, body: bytes) -> None:
raise NotImplementedError()
def get_body(self, key):
def get_body(self, key: str) -> IO[bytes] | None:
"""
Return the body as file-like object.
"""

@@ -2,8 +2,7 @@
#
# SPDX-License-Identifier: Apache-2.0
from .file_cache import FileCache, SeparateBodyFileCache
from .redis_cache import RedisCache
from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache
from pip._vendor.cachecontrol.caches.redis_cache import RedisCache
__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"]

@@ -1,22 +1,23 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import hashlib
import os
from textwrap import dedent
from typing import IO, TYPE_CHECKING
from ..cache import BaseCache, SeparateBodyBaseCache
from ..controller import CacheController
from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache
from pip._vendor.cachecontrol.controller import CacheController
try:
FileNotFoundError
except NameError:
# py2.X
FileNotFoundError = (IOError, OSError)
if TYPE_CHECKING:
from datetime import datetime
from filelock import BaseFileLock
def _secure_open_write(filename, fmode):
def _secure_open_write(filename: str, fmode: int) -> IO[bytes]:
# We only want to write to this file, so open it in write only mode
flags = os.O_WRONLY
@@ -39,7 +40,7 @@ def _secure_open_write(filename, fmode):
# there
try:
os.remove(filename)
except (IOError, OSError):
except OSError:
# The file must not exist already, so we can just skip ahead to opening
pass
@@ -62,37 +63,27 @@ class _FileCacheMixin:
def __init__(
self,
directory,
forever=False,
filemode=0o0600,
dirmode=0o0700,
use_dir_lock=None,
lock_class=None,
):
if use_dir_lock is not None and lock_class is not None:
raise ValueError("Cannot use use_dir_lock and lock_class together")
directory: str,
forever: bool = False,
filemode: int = 0o0600,
dirmode: int = 0o0700,
lock_class: type[BaseFileLock] | None = None,
) -> None:
try:
from lockfile import LockFile
from lockfile.mkdirlockfile import MkdirLockFile
if lock_class is None:
from filelock import FileLock
lock_class = FileLock
except ImportError:
notice = dedent(
"""
NOTE: In order to use the FileCache you must have
lockfile installed. You can install it via pip:
pip install lockfile
filelock installed. You can install it via pip:
pip install filelock
"""
)
raise ImportError(notice)
else:
if use_dir_lock:
lock_class = MkdirLockFile
elif lock_class is None:
lock_class = LockFile
self.directory = directory
self.forever = forever
self.filemode = filemode
@@ -100,17 +91,17 @@ class _FileCacheMixin:
self.lock_class = lock_class
@staticmethod
def encode(x):
def encode(x: str) -> str:
return hashlib.sha224(x.encode()).hexdigest()
def _fn(self, name):
def _fn(self, name: str) -> str:
# NOTE: This method should not change as some may depend on it.
# See: https://github.com/ionrock/cachecontrol/issues/63
hashed = self.encode(name)
parts = list(hashed[:5]) + [hashed]
return os.path.join(self.directory, *parts)
def get(self, key):
def get(self, key: str) -> bytes | None:
name = self._fn(key)
try:
with open(name, "rb") as fh:
@@ -119,26 +110,28 @@ class _FileCacheMixin:
except FileNotFoundError:
return None
def set(self, key, value, expires=None):
def set(
self, key: str, value: bytes, expires: int | datetime | None = None
) -> None:
name = self._fn(key)
self._write(name, value)
def _write(self, path, data: bytes):
def _write(self, path: str, data: bytes) -> None:
"""
Safely write the data to the given path.
"""
# Make sure the directory exists
try:
os.makedirs(os.path.dirname(path), self.dirmode)
except (IOError, OSError):
except OSError:
pass
with self.lock_class(path) as lock:
with self.lock_class(path + ".lock"):
# Write our actual file
with _secure_open_write(lock.path, self.filemode) as fh:
with _secure_open_write(path, self.filemode) as fh:
fh.write(data)
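
The writer now locks a sidecar file (path + ".lock") with filelock instead of the unmaintained lockfile package, and writes the data file itself via _secure_open_write. A rough standalone sketch of the same locked-write pattern (requires pip install filelock):

import os

from filelock import FileLock


def locked_write(path: str, data: bytes) -> None:
    parent = os.path.dirname(path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with FileLock(path + ".lock"):  # sidecar lock file, as in the diff
        # 0o600 mirrors the cache's default filemode: owner read/write only
        fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, "wb") as fh:
            fh.write(data)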
def _delete(self, key, suffix):
def _delete(self, key: str, suffix: str) -> None:
name = self._fn(key) + suffix
if not self.forever:
try:
@@ -153,7 +146,7 @@ class FileCache(_FileCacheMixin, BaseCache):
downloads.
"""
def delete(self, key):
def delete(self, key: str) -> None:
self._delete(key, "")
@@ -163,23 +156,23 @@ class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache):
peak memory usage.
"""
def get_body(self, key):
def get_body(self, key: str) -> IO[bytes] | None:
name = self._fn(key) + ".body"
try:
return open(name, "rb")
except FileNotFoundError:
return None
def set_body(self, key, body):
def set_body(self, key: str, body: bytes) -> None:
name = self._fn(key) + ".body"
self._write(name, body)
def delete(self, key):
def delete(self, key: str) -> None:
self._delete(key, "")
self._delete(key, ".body")
def url_to_file_path(url, filecache):
def url_to_file_path(url: str, filecache: FileCache) -> str:
"""Return the file cache path based on the URL.
This does not ensure the file exists!

@@ -1,39 +1,48 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
from __future__ import division
from datetime import datetime
from datetime import datetime, timezone
from typing import TYPE_CHECKING
from pip._vendor.cachecontrol.cache import BaseCache
if TYPE_CHECKING:
from redis import Redis
class RedisCache(BaseCache):
def __init__(self, conn):
class RedisCache(BaseCache):
def __init__(self, conn: Redis[bytes]) -> None:
self.conn = conn
def get(self, key):
def get(self, key: str) -> bytes | None:
return self.conn.get(key)
def set(self, key, value, expires=None):
def set(
self, key: str, value: bytes, expires: int | datetime | None = None
) -> None:
if not expires:
self.conn.set(key, value)
elif isinstance(expires, datetime):
expires = expires - datetime.utcnow()
self.conn.setex(key, int(expires.total_seconds()), value)
now_utc = datetime.now(timezone.utc)
if expires.tzinfo is None:
now_utc = now_utc.replace(tzinfo=None)
delta = expires - now_utc
self.conn.setex(key, int(delta.total_seconds()), value)
else:
self.conn.setex(key, expires, value)
def delete(self, key):
def delete(self, key: str) -> None:
self.conn.delete(key)
def clear(self):
def clear(self) -> None:
"""Helper for clearing all the keys in a database. Use with
caution!"""
for key in self.conn.keys():
self.conn.delete(key)
def close(self):
def close(self) -> None:
"""Redis uses connection pooling, no need to close the connection."""
pass
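
The set() change above swaps datetime.utcnow() for an aware datetime.now(timezone.utc), falling back to a naive "now" only when the caller supplied a naive expires (mixing aware and naive datetimes raises TypeError). The TTL computation in isolation:

from datetime import datetime, timedelta, timezone


def ttl_seconds(expires: datetime) -> int:
    now = datetime.now(timezone.utc)
    if expires.tzinfo is None:
        now = now.replace(tzinfo=None)  # naive input: compare naive
    return int((expires - now).total_seconds())


print(ttl_seconds(datetime.now(timezone.utc) + timedelta(minutes=5)))  # ~300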

@@ -1,32 +0,0 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
try:
import cPickle as pickle
except ImportError:
import pickle
# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
from pip._vendor.requests.packages.urllib3.response import HTTPResponse
except ImportError:
from pip._vendor.urllib3.response import HTTPResponse
try:
from pip._vendor.requests.packages.urllib3.util import is_fp_closed
except ImportError:
from pip._vendor.urllib3.util import is_fp_closed
# Replicate some six behaviour
try:
text_type = unicode
except NameError:
text_type = str

@@ -5,17 +5,27 @@
"""
The httplib2 algorithms ported for use with requests.
"""
from __future__ import annotations
import calendar
import logging
import re
import calendar
import time
from email.utils import parsedate_tz
from typing import TYPE_CHECKING, Collection, Mapping
from pip._vendor.requests.structures import CaseInsensitiveDict
from .cache import DictCache, SeparateBodyBaseCache
from .serialize import Serializer
from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache
from pip._vendor.cachecontrol.serialize import Serializer
if TYPE_CHECKING:
from typing import Literal
from pip._vendor.requests import PreparedRequest
from pip._vendor.urllib3 import HTTPResponse
from pip._vendor.cachecontrol.cache import BaseCache
logger = logging.getLogger(__name__)
@@ -24,20 +34,26 @@ URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
PERMANENT_REDIRECT_STATUSES = (301, 308)
def parse_uri(uri):
def parse_uri(uri: str) -> tuple[str, str, str, str, str]:
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
match = URI.match(uri)
assert match is not None
groups = match.groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
class CacheController(object):
class CacheController:
"""An interface to see if request should cached or not."""
def __init__(
self, cache=None, cache_etags=True, serializer=None, status_codes=None
self,
cache: BaseCache | None = None,
cache_etags: bool = True,
serializer: Serializer | None = None,
status_codes: Collection[int] | None = None,
):
self.cache = DictCache() if cache is None else cache
self.cache_etags = cache_etags
@@ -45,7 +61,7 @@ class CacheController(object):
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
@classmethod
def _urlnorm(cls, uri):
def _urlnorm(cls, uri: str) -> str:
"""Normalize the URL to create a safe key for the cache"""
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
@@ -65,10 +81,10 @@ class CacheController(object):
return defrag_uri
@classmethod
def cache_url(cls, uri):
def cache_url(cls, uri: str) -> str:
return cls._urlnorm(uri)
def parse_cache_control(self, headers):
def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
known_directives = {
# https://tools.ietf.org/html/rfc7234#section-5.2
"max-age": (int, True),
@@ -87,7 +103,7 @@ class CacheController(object):
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
retval = {}
retval: dict[str, int | None] = {}
for cc_directive in cc_headers.split(","):
if not cc_directive.strip():
@@ -122,11 +138,33 @@ class CacheController(object):
return retval
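
parse_cache_control now advertises dict[str, int | None]: integer-valued directives such as max-age parse to an int, while valueless directives such as no-cache map to None. Roughly what consuming that shape looks like (the literal below is illustrative, not captured output):

# For "Cache-Control: max-age=3600, no-cache" the parsed directives
# come back approximately as:
parsed: dict = {"max-age": 3600, "no-cache": None}

max_age = parsed.get("max-age")
if max_age is not None and max_age > 0:
    print(f"response may be served from cache for {max_age}s")
if "no-cache" in parsed:
    print("but it must be revalidated first")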
def cached_request(self, request):
def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
"""
Load a cached response, or return None if it's not available.
"""
cache_url = request.url
assert cache_url is not None
cache_data = self.cache.get(cache_url)
if cache_data is None:
logger.debug("No cache entry available")
return None
if isinstance(self.cache, SeparateBodyBaseCache):
body_file = self.cache.get_body(cache_url)
else:
body_file = None
result = self.serializer.loads(request, cache_data, body_file)
if result is None:
logger.warning("Cache entry deserialization failed, entry ignored")
return result
def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]:
"""
Return a cached response if it exists in the cache, otherwise
return False.
"""
assert request.url is not None
cache_url = self.cache_url(request.url)
logger.debug('Looking up "%s" in the cache', cache_url)
cc = self.parse_cache_control(request.headers)
@@ -140,21 +178,9 @@ class CacheController(object):
logger.debug('Request header has "max_age" as 0, cache bypassed')
return False
# Request allows serving from the cache, let's see if we find something
cache_data = self.cache.get(cache_url)
if cache_data is None:
logger.debug("No cache entry available")
return False
if isinstance(self.cache, SeparateBodyBaseCache):
body_file = self.cache.get_body(cache_url)
else:
body_file = None
# Check whether it can be deserialized
resp = self.serializer.loads(request, cache_data, body_file)
# Check whether we can load the response from the cache:
resp = self._load_from_cache(request)
if not resp:
logger.warning("Cache entry deserialization failed, entry ignored")
return False
# If we have a cached permanent redirect, return it immediately. We
@@ -174,7 +200,7 @@ class CacheController(object):
logger.debug(msg)
return resp
headers = CaseInsensitiveDict(resp.headers)
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
if not headers or "date" not in headers:
if "etag" not in headers:
# Without date or etag, the cached response can never be used
@@ -185,7 +211,9 @@ class CacheController(object):
return False
now = time.time()
date = calendar.timegm(parsedate_tz(headers["date"]))
time_tuple = parsedate_tz(headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
current_age = max(0, now - date)
logger.debug("Current age based on date: %i", current_age)
@@ -199,28 +227,30 @@ class CacheController(object):
freshness_lifetime = 0
# Check the max-age pragma in the cache control header
if "max-age" in resp_cc:
freshness_lifetime = resp_cc["max-age"]
max_age = resp_cc.get("max-age")
if max_age is not None:
freshness_lifetime = max_age
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
# If there isn't a max-age, check for an expires header
elif "expires" in headers:
expires = parsedate_tz(headers["expires"])
if expires is not None:
expire_time = calendar.timegm(expires) - date
expire_time = calendar.timegm(expires[:6]) - date
freshness_lifetime = max(0, expire_time)
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
# Determine if we are setting freshness limit in the
# request. Note, this overrides what was in the response.
if "max-age" in cc:
freshness_lifetime = cc["max-age"]
max_age = cc.get("max-age")
if max_age is not None:
freshness_lifetime = max_age
logger.debug(
"Freshness lifetime from request max-age: %i", freshness_lifetime
)
if "min-fresh" in cc:
min_fresh = cc["min-fresh"]
min_fresh = cc.get("min-fresh")
if min_fresh is not None:
# adjust our current age by our min fresh
current_age += min_fresh
logger.debug("Adjusted current age from min-fresh: %i", current_age)
@@ -239,13 +269,12 @@ class CacheController(object):
# return the original handler
return False
def conditional_headers(self, request):
cache_url = self.cache_url(request.url)
resp = self.serializer.loads(request, self.cache.get(cache_url))
def conditional_headers(self, request: PreparedRequest) -> dict[str, str]:
resp = self._load_from_cache(request)
new_headers = {}
if resp:
headers = CaseInsensitiveDict(resp.headers)
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
if "etag" in headers:
new_headers["If-None-Match"] = headers["ETag"]
@@ -255,7 +284,14 @@ class CacheController(object):
return new_headers
def _cache_set(self, cache_url, request, response, body=None, expires_time=None):
def _cache_set(
self,
cache_url: str,
request: PreparedRequest,
response: HTTPResponse,
body: bytes | None = None,
expires_time: int | None = None,
) -> None:
"""
Store the data in the cache.
"""
@@ -267,7 +303,10 @@ class CacheController(object):
self.serializer.dumps(request, response, b""),
expires=expires_time,
)
self.cache.set_body(cache_url, body)
# body is None can happen when, for example, we're only updating
# headers, as is the case in update_cached_response().
if body is not None:
self.cache.set_body(cache_url, body)
else:
self.cache.set(
cache_url,
@@ -275,7 +314,13 @@ class CacheController(object):
expires=expires_time,
)
def cache_response(self, request, response, body=None, status_codes=None):
def cache_response(
self,
request: PreparedRequest,
response: HTTPResponse,
body: bytes | None = None,
status_codes: Collection[int] | None = None,
) -> None:
"""
Algorithm for caching requests.
@@ -290,10 +335,14 @@ class CacheController(object):
)
return
response_headers = CaseInsensitiveDict(response.headers)
response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
response.headers
)
if "date" in response_headers:
date = calendar.timegm(parsedate_tz(response_headers["date"]))
time_tuple = parsedate_tz(response_headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
else:
date = 0
@@ -312,6 +361,7 @@ class CacheController(object):
cc_req = self.parse_cache_control(request.headers)
cc = self.parse_cache_control(response_headers)
assert request.url is not None
cache_url = self.cache_url(request.url)
logger.debug('Updating cache with response from "%s"', cache_url)
@@ -344,11 +394,11 @@ class CacheController(object):
if response_headers.get("expires"):
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires) - date
expires_time = calendar.timegm(expires[:6]) - date
expires_time = max(expires_time, 14 * 86400)
logger.debug("etag object cached for {0} seconds".format(expires_time))
logger.debug(f"etag object cached for {expires_time} seconds")
logger.debug("Caching due to etag")
self._cache_set(cache_url, request, response, body, expires_time)
@@ -362,11 +412,14 @@ class CacheController(object):
# is no date header then we can't do anything about expiring
# the cache.
elif "date" in response_headers:
date = calendar.timegm(parsedate_tz(response_headers["date"]))
time_tuple = parsedate_tz(response_headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
# cache when there is a max-age > 0
if "max-age" in cc and cc["max-age"] > 0:
max_age = cc.get("max-age")
if max_age is not None and max_age > 0:
logger.debug("Caching b/c date exists and max-age > 0")
expires_time = cc["max-age"]
expires_time = max_age
self._cache_set(
cache_url,
request,
@@ -381,12 +434,12 @@ class CacheController(object):
if response_headers["expires"]:
expires = parsedate_tz(response_headers["expires"])
if expires is not None:
expires_time = calendar.timegm(expires) - date
expires_time = calendar.timegm(expires[:6]) - date
else:
expires_time = None
logger.debug(
"Caching b/c of expires header. expires in {0} seconds".format(
"Caching b/c of expires header. expires in {} seconds".format(
expires_time
)
)
@@ -398,16 +451,18 @@ class CacheController(object):
expires_time,
)
def update_cached_response(self, request, response):
def update_cached_response(
self, request: PreparedRequest, response: HTTPResponse
) -> HTTPResponse:
"""On a 304 we will get a new set of headers that we want to
update our cached value with, assuming we have one.
This should only ever be called when we've sent an ETag and
gotten a 304 as the response.
"""
assert request.url is not None
cache_url = self.cache_url(request.url)
cached_response = self.serializer.loads(request, self.cache.get(cache_url))
cached_response = self._load_from_cache(request)
if not cached_response:
# we didn't have a cached response
@@ -423,11 +478,11 @@ class CacheController(object):
excluded_headers = ["content-length"]
cached_response.headers.update(
dict(
(k, v)
for k, v in response.headers.items()
{
k: v
for k, v in response.headers.items() # type: ignore[no-untyped-call]
if k.lower() not in excluded_headers
)
}
)
# we want a 200 b/c we have content via the cache

@@ -1,12 +1,17 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
from tempfile import NamedTemporaryFile
import mmap
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Any, Callable
if TYPE_CHECKING:
from http.client import HTTPResponse
class CallbackFileWrapper(object):
class CallbackFileWrapper:
"""
Small wrapper around a fp object which will tee everything read into a
buffer, and when that file is closed it will execute a callback with the
@@ -25,12 +30,14 @@ class CallbackFileWrapper(object):
performance impact.
"""
def __init__(self, fp, callback):
def __init__(
self, fp: HTTPResponse, callback: Callable[[bytes], None] | None
) -> None:
self.__buf = NamedTemporaryFile("rb+", delete=True)
self.__fp = fp
self.__callback = callback
def __getattr__(self, name):
def __getattr__(self, name: str) -> Any:
# The vagaries of garbage collection mean that self.__fp is
# not always set. Using __getattribute__ with the mangled private
# name allows looking up the attribute value and raising an
@@ -42,7 +49,7 @@ class CallbackFileWrapper(object):
fp = self.__getattribute__("_CallbackFileWrapper__fp")
return getattr(fp, name)
def __is_fp_closed(self):
def __is_fp_closed(self) -> bool:
try:
return self.__fp.fp is None
@@ -50,7 +57,8 @@ class CallbackFileWrapper(object):
pass
try:
return self.__fp.closed
closed: bool = self.__fp.closed
return closed
except AttributeError:
pass
@@ -59,7 +67,7 @@ class CallbackFileWrapper(object):
# TODO: Add some logging here...
return False
def _close(self):
def _close(self) -> None:
if self.__callback:
if self.__buf.tell() == 0:
# Empty file:
@@ -86,8 +94,8 @@ class CallbackFileWrapper(object):
# Important when caching big files.
self.__buf.close()
def read(self, amt=None):
data = self.__fp.read(amt)
def read(self, amt: int | None = None) -> bytes:
data: bytes = self.__fp.read(amt)
if data:
# We may be dealing with b'', a sign that things are over:
# it's passed e.g. after we've already closed self.__buf.
@@ -97,8 +105,8 @@ class CallbackFileWrapper(object):
return data
def _safe_read(self, amt):
data = self.__fp._safe_read(amt)
def _safe_read(self, amt: int) -> bytes:
data: bytes = self.__fp._safe_read(amt) # type: ignore[attr-defined]
if amt == 2 and data == b"\r\n":
# urllib executes this read to toss the CRLF at the end
# of the chunk.
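
CallbackFileWrapper tees everything read from the wrapped response into a temporary buffer and fires the callback with the full body once the stream is exhausted; the typed diff above preserves that behavior. A stripped-down illustration of the tee-and-callback idea:

from __future__ import annotations

import io
from typing import Callable


class TeeReader:
    def __init__(self, fp: io.BufferedIOBase, callback: Callable[[bytes], None]):
        self._fp = fp
        self._buf = io.BytesIO()
        self._callback = callback

    def read(self, amt: int | None = None) -> bytes:
        data = self._fp.read(amt)
        if data:
            self._buf.write(data)
        else:
            # stream exhausted: hand the accumulated body to the callback
            self._callback(self._buf.getvalue())
        return data


r = TeeReader(io.BytesIO(b"cached body"), callback=print)
while r.read(4):
    pass  # prints b'cached body' once the stream is drained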

@@ -1,29 +1,31 @@
# SPDX-FileCopyrightText: 2015 Eric Larson
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import annotations
import calendar
import time
from datetime import datetime, timedelta, timezone
from email.utils import formatdate, parsedate, parsedate_tz
from typing import TYPE_CHECKING, Any, Mapping
from datetime import datetime, timedelta
if TYPE_CHECKING:
from pip._vendor.urllib3 import HTTPResponse
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
def expire_after(delta, date=None):
date = date or datetime.utcnow()
def expire_after(delta: timedelta, date: datetime | None = None) -> datetime:
date = date or datetime.now(timezone.utc)
return date + delta
def datetime_to_header(dt):
def datetime_to_header(dt: datetime) -> str:
return formatdate(calendar.timegm(dt.timetuple()))
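
expire_after now defaults to an aware UTC "now", and datetime_to_header renders the result through email.utils.formatdate. For example:

import calendar
from datetime import datetime, timedelta, timezone
from email.utils import formatdate

dt = datetime.now(timezone.utc) + timedelta(days=1)
print(formatdate(calendar.timegm(dt.timetuple())))
# e.g. 'Thu, 03 Aug 2023 10:00:00 -0000'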
class BaseHeuristic(object):
def warning(self, response):
class BaseHeuristic:
def warning(self, response: HTTPResponse) -> str | None:
"""
Return a valid 1xx warning header value describing the cache
adjustments.
@@ -34,7 +36,7 @@
"""
return '110 - "Response is Stale"'
def update_headers(self, response):
def update_headers(self, response: HTTPResponse) -> dict[str, str]:
"""Update the response headers with any new headers.
NOTE: This SHOULD always include some Warning header to
@@ -43,7 +45,7 @@
"""
return {}
def apply(self, response):
def apply(self, response: HTTPResponse) -> HTTPResponse:
updated_headers = self.update_headers(response)
if updated_headers:
@@ -61,12 +63,12 @@ class OneDayCache(BaseHeuristic):
future.
"""
def update_headers(self, response):
def update_headers(self, response: HTTPResponse) -> dict[str, str]:
headers = {}
if "expires" not in response.headers:
date = parsedate(response.headers["date"])
expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[misc]
headers["expires"] = datetime_to_header(expires)
headers["cache-control"] = "public"
return headers
@@ -77,14 +79,14 @@ class ExpiresAfter(BaseHeuristic):
Cache **all** requests for a defined time period.
"""
def __init__(self, **kw):
def __init__(self, **kw: Any) -> None:
self.delta = timedelta(**kw)
def update_headers(self, response):
def update_headers(self, response: HTTPResponse) -> dict[str, str]:
expires = expire_after(self.delta)
return {"expires": datetime_to_header(expires), "cache-control": "public"}
def warning(self, response):
def warning(self, response: HTTPResponse) -> str | None:
tmpl = "110 - Automatically cached for %s. Response might be stale"
return tmpl % self.delta
@@ -101,12 +103,23 @@ class LastModified(BaseHeuristic):
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
Unlike Mozilla, we limit this to 24 hours.
"""
cacheable_by_default_statuses = {
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
200,
203,
204,
206,
300,
301,
404,
405,
410,
414,
501,
}
def update_headers(self, resp):
headers = resp.headers
def update_headers(self, resp: HTTPResponse) -> dict[str, str]:
headers: Mapping[str, str] = resp.headers
if "expires" in headers:
return {}
@@ -120,9 +133,11 @@ class LastModified(BaseHeuristic):
if "date" not in headers or "last-modified" not in headers:
return {}
date = calendar.timegm(parsedate_tz(headers["date"]))
time_tuple = parsedate_tz(headers["date"])
assert time_tuple is not None
date = calendar.timegm(time_tuple[:6])
last_modified = parsedate(headers["last-modified"])
if date is None or last_modified is None:
if last_modified is None:
return {}
now = time.time()
@@ -135,5 +150,5 @@ class LastModified(BaseHeuristic):
expires = date + freshness_lifetime
return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
def warning(self, resp):
def warning(self, resp: HTTPResponse) -> str | None:
return None
