From c69d55f7c82d5ae2cce542bcfb98d043ca4836a0 Mon Sep 17 00:00:00 2001
From: Matt Davis
Date: Wed, 6 Jul 2022 22:09:37 -0400
Subject: [PATCH 1/4] Vendor in pip 22.1.2

---
 pipenv/patched/notpip/__init__.py | 2 +-
 pipenv/patched/notpip/_internal/build_env.py | 18 +-
 .../notpip/_internal/cli/base_command.py | 2 +-
 .../notpip/_internal/cli/cmdoptions.py | 66 +-
 .../notpip/_internal/cli/command_context.py | 4 +-
 pipenv/patched/notpip/_internal/cli/parser.py | 6 +-
 .../notpip/_internal/cli/progress_bars.py | 263 +-
 .../notpip/_internal/cli/req_command.py | 35 +-
 .../patched/notpip/_internal/cli/spinners.py | 12 +-
 .../notpip/_internal/commands/completion.py | 30 +
 .../_internal/commands/configuration.py | 22 +-
 .../notpip/_internal/commands/debug.py | 3 +-
 .../notpip/_internal/commands/download.py | 7 +-
 .../notpip/_internal/commands/install.py | 14 +-
 .../patched/notpip/_internal/commands/list.py | 4 +-
 .../patched/notpip/_internal/commands/show.py | 13 +-
 .../notpip/_internal/commands/uninstall.py | 7 +-
 .../notpip/_internal/commands/wheel.py | 14 +-
 .../patched/notpip/_internal/configuration.py | 12 +-
 .../notpip/_internal/distributions/base.py | 5 +-
 .../_internal/distributions/installed.py | 5 +-
 .../notpip/_internal/distributions/sdist.py | 27 +-
 .../notpip/_internal/distributions/wheel.py | 5 +-
 .../notpip/_internal/index/package_finder.py | 106 +-
 .../notpip/_internal/locations/__init__.py | 16 +-
 .../notpip/_internal/locations/_distutils.py | 2 +-
 .../notpip/_internal/metadata/__init__.py | 69 +-
 .../patched/notpip/_internal/metadata/base.py | 63 +-
 .../_internal/metadata/importlib/__init__.py | 4 +
 .../_internal/metadata/importlib/_compat.py | 41 +
 .../_internal/metadata/importlib/_dists.py | 274 ++
 .../_internal/metadata/importlib/_envs.py | 163 ++
 .../_internal/metadata/pkg_resources.py | 46 +-
 .../notpip/_internal/models/direct_url.py | 8 -
 .../patched/notpip/_internal/network/cache.py | 4 +-
 .../notpip/_internal/network/lazy_wheel.py | 8 +-
 .../notpip/_internal/network/session.py | 6 +-
 .../patched/notpip/_internal/network/utils.py | 4 +-
 .../build/build_tracker.py} | 20 +-
 .../notpip/_internal/operations/freeze.py | 4 +-
 .../_internal/operations/install/wheel.py | 7 +-
 .../notpip/_internal/operations/prepare.py | 99 +-
 pipenv/patched/notpip/_internal/pyproject.py | 11 +-
 .../patched/notpip/_internal/req/__init__.py | 4 +-
 .../notpip/_internal/req/constructors.py | 11 +
 .../patched/notpip/_internal/req/req_file.py | 16 +-
 .../notpip/_internal/req/req_install.py | 6 +-
 .../patched/notpip/_internal/req/req_set.py | 122 +-
 .../notpip/_internal/req/req_uninstall.py | 27 +-
 .../_internal/resolution/legacy/resolver.py | 130 +-
 .../resolution/resolvelib/candidates.py | 3 +
 .../resolution/resolvelib/factory.py | 7 +
 .../notpip/_internal/self_outdated_check.py | 233 +-
 .../notpip/_internal/utils/encoding.py | 2 +-
 .../notpip/_internal/utils/entrypoints.py | 52 +
 .../notpip/_internal/utils/filesystem.py | 33 +-
 .../patched/notpip/_internal/utils/hashes.py | 4 +-
 .../patched/notpip/_internal/utils/logging.py | 27 +-
 pipenv/patched/notpip/_internal/utils/misc.py | 104 +-
 .../notpip/_internal/utils/subprocess.py | 4 +-
 .../notpip/_internal/utils/temp_dir.py | 8 +-
 .../notpip/_internal/utils/unpacking.py | 3 +-
 .../notpip/_vendor/cachecontrol/__init__.py | 2 +-
 .../notpip/_vendor/cachecontrol/cache.py | 22 +
 .../_vendor/cachecontrol/caches/__init__.py | 7 +-
 .../_vendor/cachecontrol/caches/file_cache.py | 52 +-
 .../cachecontrol/caches/redis_cache.py | 4 +-
 .../notpip/_vendor/cachecontrol/controller.py | 60 +-
 .../notpip/_vendor/cachecontrol/serialize.py | 30 +-
 pipenv/patched/notpip/_vendor/certifi/LICENSE | 21 +
 pipenv/patched/notpip/_vendor/chardet/LICENSE | 504 ++++
 .../notpip/_vendor/colorama/LICENSE.txt | 27 +
 .../notpip/_vendor/distlib/LICENSE.txt | 284 ++
 .../_vendor/distlib/_backport/__init__.py | 6 +
 .../notpip/_vendor/distlib/_backport/misc.py | 41 +
 .../_vendor/distlib/_backport/shutil.py | 764 +++++
 .../_vendor/distlib/_backport/sysconfig.cfg | 84 +
 .../_vendor/distlib/_backport/sysconfig.py | 786 +++++
 .../_vendor/distlib/_backport/tarfile.py | 2607 +++++++++++++++++
 .../distro/LICENSE} | 0
 .../patched/notpip/_vendor/distro/__init__.py | 54 +
 .../patched/notpip/_vendor/distro/__main__.py | 4 +
 .../notpip/_vendor/{ => distro}/distro.py | 412 ++-
 .../patched/notpip/_vendor/html5lib/LICENSE | 20 +
 pipenv/patched/notpip/_vendor/idna/LICENSE.md | 29 +
 pipenv/patched/notpip/_vendor/msgpack/COPYING | 14 +
 .../patched/notpip/_vendor/packaging/LICENSE | 3 +
 .../notpip/_vendor/packaging/LICENSE.APACHE | 177 ++
 .../notpip/_vendor/packaging/LICENSE.BSD | 23 +
 pipenv/patched/notpip/_vendor/pep517/LICENSE | 21 +
 .../notpip/_vendor/pkg_resources/LICENSE | 19 +
 .../notpip/_vendor/platformdirs/__init__.py | 27 +-
 .../notpip/_vendor/platformdirs/android.py | 15 +-
 .../notpip/_vendor/platformdirs/version.py | 6 +-
 .../notpip/_vendor/progress/__init__.py | 189 --
 pipenv/patched/notpip/_vendor/progress/bar.py | 93 -
 .../patched/notpip/_vendor/progress/colors.py | 79 -
 .../notpip/_vendor/progress/counter.py | 47 -
 .../notpip/_vendor/progress/spinner.py | 45 -
 .../patched/notpip/_vendor/pygments/LICENSE | 25 +
 .../pyparsing/LICENSE} | 0
 .../notpip/_vendor/pyparsing/__init__.py | 25 +-
 .../patched/notpip/_vendor/pyparsing/core.py | 151 +-
 .../_vendor/pyparsing/diagram/__init__.py | 22 +-
 .../notpip/_vendor/pyparsing/helpers.py | 40 +-
 .../notpip/_vendor/pyparsing/results.py | 18 +-
 .../patched/notpip/_vendor/requests/LICENSE | 175 ++
 .../patched/notpip/_vendor/resolvelib/LICENSE | 13 +
 .../patched/notpip/_vendor/rich/__init__.py | 6 +-
 .../patched/notpip/_vendor/rich/__main__.py | 20 +-
 .../patched/notpip/_vendor/rich/_inspect.py | 22 +-
 .../patched/notpip/_vendor/rich/_lru_cache.py | 24 +-
 .../patched/notpip/_vendor/rich/_spinners.py | 474 +--
 .../notpip/_vendor/rich/_win32_console.py | 630 ++++
 .../patched/notpip/_vendor/rich/_windows.py | 34 +-
 .../notpip/_vendor/rich/_windows_renderer.py | 53 +
 pipenv/patched/notpip/_vendor/rich/align.py | 1 -
 pipenv/patched/notpip/_vendor/rich/ansi.py | 51 +-
 pipenv/patched/notpip/_vendor/rich/cells.py | 31 +-
 pipenv/patched/notpip/_vendor/rich/color.py | 40 +-
 pipenv/patched/notpip/_vendor/rich/console.py | 422 ++-
 pipenv/patched/notpip/_vendor/rich/control.py | 2 +-
 .../notpip/_vendor/rich/default_styles.py | 2 +-
 .../patched/notpip/_vendor/rich/diagnose.py | 35 +-
 .../patched/notpip/_vendor/rich/filesize.py | 6 +-
 .../notpip/_vendor/rich/highlighter.py | 50 +-
 pipenv/patched/notpip/_vendor/rich/jupyter.py | 14 +-
 pipenv/patched/notpip/_vendor/rich/layout.py | 3 +-
 pipenv/patched/notpip/_vendor/rich/logging.py | 18 +-
 pipenv/patched/notpip/_vendor/rich/markup.py | 15 +-
 pipenv/patched/notpip/_vendor/rich/measure.py | 6 +-
 pipenv/patched/notpip/_vendor/rich/pager.py | 2 +-
 pipenv/patched/notpip/_vendor/rich/panel.py | 15 +-
 pipenv/patched/notpip/_vendor/rich/pretty.py | 140 +-
 .../patched/notpip/_vendor/rich/progress.py | 604 +++-
 pipenv/patched/notpip/_vendor/rich/prompt.py | 4 +-
 .../patched/notpip/_vendor/rich/protocol.py | 2 +-
 pipenv/patched/notpip/_vendor/rich/repr.py | 37 +-
 pipenv/patched/notpip/_vendor/rich/segment.py | 87 +-
 pipenv/patched/notpip/_vendor/rich/syntax.py | 96 +-
 pipenv/patched/notpip/_vendor/rich/table.py | 30 +-
 .../patched/notpip/_vendor/rich/tabulate.py | 51 -
 .../notpip/_vendor/rich/terminal_theme.py | 98 +
 pipenv/patched/notpip/_vendor/rich/text.py | 3 +
 .../patched/notpip/_vendor/rich/traceback.py | 25 +-
 pipenv/patched/notpip/_vendor/rich/tree.py | 10 +-
 pipenv/patched/notpip/_vendor/six.LICENSE | 18 +
 pipenv/patched/notpip/_vendor/tomli/LICENSE | 21 +
 .../patched/notpip/_vendor/tomli/__init__.py | 11 +-
 .../patched/notpip/_vendor/tomli/_parser.py | 360 ++-
 pipenv/patched/notpip/_vendor/tomli/_re.py | 94 +-
 pipenv/patched/notpip/_vendor/tomli/_types.py | 10 +
 .../notpip/_vendor/typing_extensions.LICENSE | 254 ++
 .../notpip/_vendor/typing_extensions.py | 1704 +++++------
 .../notpip/_vendor/urllib3/LICENSE.txt | 21 +
 .../notpip/_vendor/urllib3/_version.py | 2 +-
 .../notpip/_vendor/urllib3/connection.py | 6 +-
 .../notpip/_vendor/urllib3/poolmanager.py | 1 +
 .../notpip/_vendor/urllib3/response.py | 5 +-
 .../notpip/_vendor/urllib3/util/request.py | 6 -
 .../urllib3/util/ssl_match_hostname.py | 10 +-
 pipenv/patched/notpip/_vendor/vendor.txt | 17 +-
 .../notpip/_vendor/webencodings/LICENSE | 31 +
 pipenv/patched/notpip/appdirs.LICENSE.txt | 23 -
 pipenv/patched/patched.txt | 2 +-
 pipenv/patched/safety/safety.py | 2 +-
 tasks/vendoring/__init__.py | 19 +-
 .../patches/patched/_post_pip_import.patch | 16 +-
 tasks/vendoring/patches/patched/pip22.patch | 60 +-
 .../patches/patched/pip_index_safety.patch | 12 +
 170 files changed, 11397 insertions(+), 3845 deletions(-)
 create mode 100644 pipenv/patched/notpip/_internal/metadata/importlib/__init__.py
 create mode 100644 pipenv/patched/notpip/_internal/metadata/importlib/_compat.py
 create mode 100644 pipenv/patched/notpip/_internal/metadata/importlib/_dists.py
 create mode 100644 pipenv/patched/notpip/_internal/metadata/importlib/_envs.py
 rename pipenv/patched/notpip/_internal/{req/req_tracker.py => operations/build/build_tracker.py} (85%)
 create mode 100644 pipenv/patched/notpip/_vendor/certifi/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/chardet/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/colorama/LICENSE.txt
 create mode 100644 pipenv/patched/notpip/_vendor/distlib/LICENSE.txt
 create mode 100644 pipenv/patched/notpip/_vendor/distlib/_backport/__init__.py
 create mode 100644 pipenv/patched/notpip/_vendor/distlib/_backport/misc.py
 create mode 100644 pipenv/patched/notpip/_vendor/distlib/_backport/shutil.py
 create mode 100644 pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.cfg
 create mode 100644 pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py
 create mode 100644 pipenv/patched/notpip/_vendor/distlib/_backport/tarfile.py
 rename pipenv/patched/notpip/{distro.LICENSE => _vendor/distro/LICENSE} (100%)
 create mode 100644 pipenv/patched/notpip/_vendor/distro/__init__.py
 create mode 100644 pipenv/patched/notpip/_vendor/distro/__main__.py
 rename pipenv/patched/notpip/_vendor/{ => distro}/distro.py (83%)
 create mode 100644 pipenv/patched/notpip/_vendor/html5lib/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/idna/LICENSE.md
 create mode 100644 pipenv/patched/notpip/_vendor/msgpack/COPYING
 create mode 100644 pipenv/patched/notpip/_vendor/packaging/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE
 create mode 100644 pipenv/patched/notpip/_vendor/packaging/LICENSE.BSD
 create mode 100644 pipenv/patched/notpip/_vendor/pep517/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/pkg_resources/LICENSE
 delete mode 100644 pipenv/patched/notpip/_vendor/progress/__init__.py
 delete mode 100644 pipenv/patched/notpip/_vendor/progress/bar.py
 delete mode 100644 pipenv/patched/notpip/_vendor/progress/colors.py
 delete mode 100644 pipenv/patched/notpip/_vendor/progress/counter.py
 delete mode 100644 pipenv/patched/notpip/_vendor/progress/spinner.py
 create mode 100644 pipenv/patched/notpip/_vendor/pygments/LICENSE
 rename pipenv/patched/notpip/{pyparsing.LICENSE => _vendor/pyparsing/LICENSE} (100%)
 create mode 100644 pipenv/patched/notpip/_vendor/requests/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/resolvelib/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/rich/_win32_console.py
 create mode 100644 pipenv/patched/notpip/_vendor/rich/_windows_renderer.py
 delete mode 100644 pipenv/patched/notpip/_vendor/rich/tabulate.py
 create mode 100644 pipenv/patched/notpip/_vendor/six.LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/tomli/LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/tomli/_types.py
 create mode 100644 pipenv/patched/notpip/_vendor/typing_extensions.LICENSE
 create mode 100644 pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt
 create mode 100644 pipenv/patched/notpip/_vendor/webencodings/LICENSE
 delete mode 100644 pipenv/patched/notpip/appdirs.LICENSE.txt

diff --git a/pipenv/patched/notpip/__init__.py b/pipenv/patched/notpip/__init__.py
index d19d72ceea..e4087f4f95 100644
--- a/pipenv/patched/notpip/__init__.py
+++ b/pipenv/patched/notpip/__init__.py
@@ -1,6 +1,6 @@
 from typing import List, Optional
 
-__version__ = "22.0.4"
+__version__ = "22.1.2"
 
 
 def main(args: Optional[List[str]] = None) -> int:
diff --git a/pipenv/patched/notpip/_internal/build_env.py b/pipenv/patched/notpip/_internal/build_env.py
index 28dffd3604..abc34211c7 100644
--- a/pipenv/patched/notpip/_internal/build_env.py
+++ b/pipenv/patched/notpip/_internal/build_env.py
@@ -11,7 +11,7 @@
 from collections import OrderedDict
 from sysconfig import get_paths
 from types import TracebackType
-from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type
+from typing import TYPE_CHECKING, Generator, Iterable, List, Optional, Set, Tuple, Type
 
 from pipenv.patched.notpip._vendor.certifi import where
 from pipenv.patched.notpip._vendor.packaging.requirements import Requirement
@@ -20,7 +20,7 @@
 from pip import __file__ as pip_location
 from pipenv.patched.notpip._internal.cli.spinners import open_spinner
 from pipenv.patched.notpip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
-from pipenv.patched.notpip._internal.metadata import get_environment
+from pipenv.patched.notpip._internal.metadata import get_default_environment, get_environment
 from pipenv.patched.notpip._internal.utils.subprocess import call_subprocess
 from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
 
@@ -42,7 +42,7 @@ def __init__(self, path: str) -> None:
 
 
 @contextlib.contextmanager
-def _create_standalone_pip() -> Iterator[str]:
+def _create_standalone_pip() -> Generator[str, None, None]:
    """Create a "standalone pip" zip file.
 
     The zip file's content is identical to the currently-running pip.
@@ -168,9 +168,17 @@ def check_requirements(
         missing = set()
         conflicting = set()
         if reqs:
-            env = get_environment(self._lib_dirs)
+            env = (
+                get_environment(self._lib_dirs)
+                if hasattr(self, "_lib_dirs")
+                else get_default_environment()
+            )
             for req_str in reqs:
                 req = Requirement(req_str)
+                # We're explicitly evaluating with an empty extra value, since build
+                # environments are not provided any mechanism to select specific extras.
+                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
+                    continue
                 dist = env.get_distribution(req.name)
                 if not dist:
                     missing.add(req_str)
@@ -179,7 +187,7 @@ def check_requirements(
                     installed_req_str = f"{req.name}=={dist.version}"
                 else:
                     installed_req_str = f"{req.name}==={dist.version}"
-                if dist.version not in req.specifier:
+                if not req.specifier.contains(dist.version, prereleases=True):
                     conflicting.add((installed_req_str, req_str))
                 # FIXME: Consider direct URL?
         return conflicting, missing
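The two behavior changes in check_requirements() above are easiest to see through the vendored packaging APIs it calls. A minimal standalone sketch (the requirement strings are invented for illustration):

    from pipenv.patched.notpip._vendor.packaging.requirements import Requirement
    from pipenv.patched.notpip._vendor.packaging.version import Version

    # A build requirement guarded by an extra-marker: evaluated with an empty
    # "extra", the marker is False, so check_requirements() now skips it
    # instead of evaluating the marker against an undefined variable.
    req = Requirement('coverage >= 6.0; extra == "toml"')  # hypothetical
    assert req.marker is not None
    print(req.marker.evaluate({"extra": ""}))  # False -> requirement skipped

    # "version in specifier" filters out pre-releases by default, so an
    # installed pre-release build dependency used to be reported as
    # conflicting; contains(..., prereleases=True) accepts it.
    spec = Requirement("setuptools >= 40.8.0").specifier
    pre = Version("63.0.0b1")
    print(pre in spec)                           # False: pre-release excluded
    print(spec.contains(pre, prereleases=True))  # True: satisfies >= 40.8.0
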
diff --git a/pipenv/patched/notpip/_internal/cli/base_command.py b/pipenv/patched/notpip/_internal/cli/base_command.py
index 6a71bc2125..bf946b4a95 100644
--- a/pipenv/patched/notpip/_internal/cli/base_command.py
+++ b/pipenv/patched/notpip/_internal/cli/base_command.py
@@ -168,7 +168,7 @@ def exc_logging_wrapper(*args: Any) -> int:
                 assert isinstance(status, int)
                 return status
             except DiagnosticPipError as exc:
-                logger.error("[present-diagnostic] %s", exc)
+                logger.error("[present-rich] %s", exc)
                 logger.debug("Exception information:", exc_info=True)
 
                 return ERROR
diff --git a/pipenv/patched/notpip/_internal/cli/cmdoptions.py b/pipenv/patched/notpip/_internal/cli/cmdoptions.py
index 603f14aa2d..38b427805e 100644
--- a/pipenv/patched/notpip/_internal/cli/cmdoptions.py
+++ b/pipenv/patched/notpip/_internal/cli/cmdoptions.py
@@ -10,6 +10,7 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False
 
+import importlib.util
 import logging
 import os
 import textwrap
@@ -21,7 +22,6 @@
 from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
 
 from pipenv.patched.notpip._internal.cli.parser import ConfigOptionParser
-from pipenv.patched.notpip._internal.cli.progress_bars import BAR_TYPES
 from pipenv.patched.notpip._internal.exceptions import CommandError
 from pipenv.patched.notpip._internal.locations import USER_CACHE_DIR, get_src_prefix
 from pipenv.patched.notpip._internal.models.format_control import FormatControl
@@ -236,13 +236,9 @@ class PipOption(Option):
     "--progress-bar",
     dest="progress_bar",
     type="choice",
-    choices=list(BAR_TYPES.keys()),
+    choices=["on", "off"],
     default="on",
-    help=(
-        "Specify type of progress to be displayed ["
-        + "|".join(BAR_TYPES.keys())
-        + "] (default: %default)"
-    ),
+    help="Specify whether the progress bar should be used [on, off] (default: on)",
 )
 
 log: Callable[..., Option] = partial(
@@ -272,7 +268,7 @@ class PipOption(Option):
     dest="proxy",
     type="str",
     default="",
-    help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
+    help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
 )
 
 retries: Callable[..., Option] = partial(
@@ -753,6 +749,15 @@ def _handle_no_cache_dir(
     "if this option is used.",
 )
 
+check_build_deps: Callable[..., Option] = partial(
+    Option,
+    "--check-build-dependencies",
+    dest="check_build_deps",
+    action="store_true",
+    default=False,
+    help="Check the build dependencies when PEP517 is used.",
+)
+
 
 def _handle_no_use_pep517(
     option: Option, opt: str, value: str, parser: OptionParser
@@ -775,6 +780,12 @@ def _handle_no_use_pep517(
         """
         raise_option_error(parser, option=option, msg=msg)
 
+    # If user doesn't wish to use pep517, we check if setuptools is installed
+    # and raise error if it is not.
+    if not importlib.util.find_spec("setuptools"):
+        msg = "It is not possible to use --no-use-pep517 without setuptools installed."
+        raise_option_error(parser, option=option, msg=msg)
+
     # Otherwise, --no-use-pep517 was passed via the command-line.
     parser.values.use_pep517 = False
 
@@ -799,6 +810,33 @@ def _handle_no_use_pep517(
     help=SUPPRESS_HELP,
 )
 
+
+def _handle_config_settings(
+    option: Option, opt_str: str, value: str, parser: OptionParser
+) -> None:
+    key, sep, val = value.partition("=")
+    if sep != "=":
+        parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")  # noqa
+    dest = getattr(parser.values, option.dest)
+    if dest is None:
+        dest = {}
+        setattr(parser.values, option.dest, dest)
+    dest[key] = val
+
+
+config_settings: Callable[..., Option] = partial(
+    Option,
+    "--config-settings",
+    dest="config_settings",
+    type=str,
+    action="callback",
+    callback=_handle_config_settings,
+    metavar="settings",
+    help="Configuration settings to be passed to the PEP 517 build backend. "
+    "Settings take the form KEY=VALUE. Use multiple --config-settings options "
+    "to pass multiple keys to the backend.",
+)
+
 install_options: Callable[..., Option] = partial(
     Option,
     "--install-option",
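The --config-settings plumbing above is a plain optparse callback that folds repeated KEY=VAL flags into a single dict on the parser values. A self-contained sketch of the same pattern (the sample settings are hypothetical; whatever the build backend understands):

    import optparse

    def handle_config_settings(option, opt_str, value, parser):
        # Mirrors _handle_config_settings: split KEY=VAL, create the dict on
        # first use, and accumulate every occurrence into it.
        key, sep, val = value.partition("=")
        if sep != "=":
            parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
        dest = getattr(parser.values, option.dest)
        if dest is None:
            dest = {}
            setattr(parser.values, option.dest, dest)
        dest[key] = val

    parser = optparse.OptionParser()
    parser.add_option(
        "--config-settings",
        dest="config_settings",
        type=str,
        action="callback",
        callback=handle_config_settings,
    )
    opts, _ = parser.parse_args(
        ["--config-settings=editable-mode=strict",
         "--config-settings=plat-name=win32"]
    )
    print(opts.config_settings)
    # {'editable-mode': 'strict', 'plat-name': 'win32'}

Note that partition("=") splits at the first "=" only, so values may themselves contain "=".
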
@@ -858,6 +896,15 @@ def _handle_no_use_pep517(
     "of pip is available for download. Implied with --no-index.",
 )
 
+root_user_action: Callable[..., Option] = partial(
+    Option,
+    "--root-user-action",
+    dest="root_user_action",
+    default="warn",
+    choices=["warn", "ignore"],
+    help="Action if pip is run as a root user. By default, a warning message is shown.",
+)
+
 
 def _handle_merge_hash(
     option: Option, opt_str: str, value: str, parser: OptionParser
@@ -953,7 +1000,7 @@ def check_list_path_option(options: Values) -> None:
     metavar="feature",
     action="append",
     default=[],
-    choices=["2020-resolver", "fast-deps", "in-tree-build"],
+    choices=["2020-resolver", "fast-deps"],
     help="Enable new functionality, that may be backward incompatible.",
 )
 
@@ -966,7 +1013,6 @@ def check_list_path_option(options: Values) -> None:
     default=[],
     choices=[
         "legacy-resolver",
-        "out-of-tree-build",
         "backtrack-on-build-failures",
         "html5lib",
     ],
diff --git a/pipenv/patched/notpip/_internal/cli/command_context.py b/pipenv/patched/notpip/_internal/cli/command_context.py
index ed68322376..139995ac3f 100644
--- a/pipenv/patched/notpip/_internal/cli/command_context.py
+++ b/pipenv/patched/notpip/_internal/cli/command_context.py
@@ -1,5 +1,5 @@
 from contextlib import ExitStack, contextmanager
-from typing import ContextManager, Iterator, TypeVar
+from typing import ContextManager, Generator, TypeVar
 
 _T = TypeVar("_T", covariant=True)
 
@@ -11,7 +11,7 @@ def __init__(self) -> None:
         self._main_context = ExitStack()
 
     @contextmanager
-    def main_context(self) -> Iterator[None]:
+    def main_context(self) -> Generator[None, None, None]:
         assert not self._in_main_context
 
         self._in_main_context = True
diff --git a/pipenv/patched/notpip/_internal/cli/parser.py b/pipenv/patched/notpip/_internal/cli/parser.py
index febf445c8b..dba1f10e3c 100644
--- a/pipenv/patched/notpip/_internal/cli/parser.py
+++ b/pipenv/patched/notpip/_internal/cli/parser.py
@@ -6,7 +6,7 @@
 import sys
 import textwrap
 from contextlib import suppress
-from typing import Any, Dict, Iterator, List, Tuple
+from typing import Any, Dict, Generator, List, Tuple
 
 from pipenv.patched.notpip._internal.cli.status_codes import UNKNOWN_ERROR
 from pipenv.patched.notpip._internal.configuration import Configuration, ConfigurationError
@@ -175,7 +175,9 @@ def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
             print(f"An error occurred during configuration: {exc}")
             sys.exit(3)
 
-    def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]:
+    def _get_ordered_configuration_items(
+        self,
+    ) -> Generator[Tuple[str, Any], None, None]:
         # Configuration gives keys in an unordered manner. Order them.
         override_order = ["global", self.name, ":env:"]
diff --git a/pipenv/patched/notpip/_internal/cli/progress_bars.py b/pipenv/patched/notpip/_internal/cli/progress_bars.py
index 692a1b4c5c..794611dc8a 100644
--- a/pipenv/patched/notpip/_internal/cli/progress_bars.py
+++ b/pipenv/patched/notpip/_internal/cli/progress_bars.py
@@ -1,11 +1,6 @@
 import functools
-import itertools
-import sys
-from signal import SIGINT, default_int_handler, signal
-from typing import Any, Callable, Iterator, Optional, Tuple
+from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
 
-from pipenv.patched.notpip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
-from pipenv.patched.notpip._vendor.progress.spinner import Spinner
 from pipenv.patched.notpip._vendor.rich.progress import (
     BarColumn,
     DownloadColumn,
@@ -19,263 +14,17 @@
     TransferSpeedColumn,
 )
 
-from pipenv.patched.notpip._internal.utils.compat import WINDOWS
 from pipenv.patched.notpip._internal.utils.logging import get_indentation
-from pipenv.patched.notpip._internal.utils.misc import format_size
 
-try:
-    from pipenv.patched.notpip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
-    colorama = None
+DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
 
-DownloadProgressRenderer = Callable[[Iterator[bytes]], Iterator[bytes]]
-
-
-def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar:
-    encoding = getattr(preferred.file, "encoding", None)
-
-    # If we don't know what encoding this file is in, then we'll just assume
-    # that it doesn't support unicode and use the ASCII bar.
-    if not encoding:
-        return fallback
-
-    # Collect all of the possible characters we want to use with the preferred
-    # bar.
-    characters = [
-        getattr(preferred, "empty_fill", ""),
-        getattr(preferred, "fill", ""),
-    ]
-    characters += list(getattr(preferred, "phases", []))
-
-    # Try to decode the characters we're using for the bar using the encoding
-    # of the given file, if this works then we'll assume that we can use the
-    # fancier bar and if not we'll fall back to the plaintext bar.
-    try:
-        "".join(characters).encode(encoding)
-    except UnicodeEncodeError:
-        return fallback
-    else:
-        return preferred
-
-
-_BaseBar: Any = _select_progress_class(IncrementalBar, Bar)
-
-
-class InterruptibleMixin:
-    """
-    Helper to ensure that self.finish() gets called on keyboard interrupt.
-
-    This allows downloads to be interrupted without leaving temporary state
-    (like hidden cursors) behind.
-
-    This class is similar to the progress library's existing SigIntMixin
-    helper, but as of version 1.2, that helper has the following problems:
-
-    1. It calls sys.exit().
-    2. It discards the existing SIGINT handler completely.
-    3. It leaves its own handler in place even after an uninterrupted finish,
-       which will have unexpected delayed effects if the user triggers an
-       unrelated keyboard interrupt some time after a progress-displaying
-       download has already completed, for example.
-    """
-
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        """
-        Save the original SIGINT handler for later.
-        """
-        # https://github.com/python/mypy/issues/5887
-        super().__init__(*args, **kwargs)  # type: ignore
-
-        self.original_handler = signal(SIGINT, self.handle_sigint)
-
-        # If signal() returns None, the previous handler was not installed from
-        # Python, and we cannot restore it. This probably should not happen,
-        # but if it does, we must restore something sensible instead, at least.
-        # The least bad option should be Python's default SIGINT handler, which
-        # just raises KeyboardInterrupt.
-        if self.original_handler is None:
-            self.original_handler = default_int_handler
-
-    def finish(self) -> None:
-        """
-        Restore the original SIGINT handler after finishing.
-
-        This should happen regardless of whether the progress display finishes
-        normally, or gets interrupted.
-        """
-        super().finish()  # type: ignore
-        signal(SIGINT, self.original_handler)
-
-    def handle_sigint(self, signum, frame):  # type: ignore
-        """
-        Call self.finish() before delegating to the original SIGINT handler.
-
-        This handler should only be in place while the progress display is
-        active.
-        """
-        self.finish()
-        self.original_handler(signum, frame)
-
-
-class SilentBar(Bar):
-    def update(self) -> None:
-        pass
-
-
-class BlueEmojiBar(IncrementalBar):
-
-    suffix = "%(percent)d%%"
-    bar_prefix = " "
-    bar_suffix = " "
-    phases = ("\U0001F539", "\U0001F537", "\U0001F535")
-
-
-class DownloadProgressMixin:
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        # https://github.com/python/mypy/issues/5887
-        super().__init__(*args, **kwargs)  # type: ignore
-        self.message: str = (" " * (get_indentation() + 2)) + self.message
-
-    @property
-    def downloaded(self) -> str:
-        return format_size(self.index)  # type: ignore
-
-    @property
-    def download_speed(self) -> str:
-        # Avoid zero division errors...
-        if self.avg == 0.0:  # type: ignore
-            return "..."
-        return format_size(1 / self.avg) + "/s"  # type: ignore
-
-    @property
-    def pretty_eta(self) -> str:
-        if self.eta:  # type: ignore
-            return f"eta {self.eta_td}"  # type: ignore
-        return ""
-
-    def iter(self, it):  # type: ignore
-        for x in it:
-            yield x
-            # B305 is incorrectly raised here
-            # https://github.com/PyCQA/flake8-bugbear/issues/59
-            self.next(len(x))  # noqa: B305
-        self.finish()
-
-
-class WindowsMixin:
-    def __init__(self, *args: Any, **kwargs: Any) -> None:
-        # The Windows terminal does not support the hide/show cursor ANSI codes
-        # even with colorama. So we'll ensure that hide_cursor is False on
-        # Windows.
-        # This call needs to go before the super() call, so that hide_cursor
-        # is set in time. The base progress bar class writes the "hide cursor"
-        # code to the terminal in its init, so if we don't set this soon
-        # enough, we get a "hide" with no corresponding "show"...
-        if WINDOWS and self.hide_cursor:  # type: ignore
-            self.hide_cursor = False
-
-        # https://github.com/python/mypy/issues/5887
-        super().__init__(*args, **kwargs)  # type: ignore
-
-        # Check if we are running on Windows and we have the colorama module,
-        # if we do then wrap our file with it.
-        if WINDOWS and colorama:
-            self.file = colorama.AnsiToWin32(self.file)  # type: ignore
-            # The progress code expects to be able to call self.file.isatty()
-            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
-            # add it.
-            self.file.isatty = lambda: self.file.wrapped.isatty()
-            # The progress code expects to be able to call self.file.flush()
-            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
-            # add it.
-            self.file.flush = lambda: self.file.wrapped.flush()
-
-
-class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
-
-    file = sys.stdout
-    message = "%(percent)d%%"
-    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
-
-
-class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
-    pass
-
-
-class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
-    pass
-
-
-class DownloadBar(BaseDownloadProgressBar, Bar):
-    pass
-
-
-class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
-    pass
-
-
-class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
-    pass
-
-
-class DownloadProgressSpinner(
-    WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
-):
-
-    file = sys.stdout
-    suffix = "%(downloaded)s %(download_speed)s"
-
-    def next_phase(self) -> str:
-        if not hasattr(self, "_phaser"):
-            self._phaser = itertools.cycle(self.phases)
-        return next(self._phaser)
-
-    def update(self) -> None:
-        message = self.message % self
-        phase = self.next_phase()
-        suffix = self.suffix % self
-        line = "".join(
-            [
-                message,
-                " " if message else "",
-                phase,
-                " " if suffix else "",
-                suffix,
-            ]
-        )
-
-        self.writeln(line)
-
-
-BAR_TYPES = {
-    "off": (DownloadSilentBar, DownloadSilentBar),
-    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
-    "ascii": (DownloadBar, DownloadProgressSpinner),
-    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
-    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
-}
-
-
-def _legacy_progress_bar(
-    progress_bar: str, max: Optional[int]
-) -> DownloadProgressRenderer:
-    if max is None or max == 0:
-        return BAR_TYPES[progress_bar][1]().iter  # type: ignore
-    else:
-        return BAR_TYPES[progress_bar][0](max=max).iter
-
-
-#
-# Modern replacement, for our legacy progress bars.
-#
 def _rich_progress_bar(
-    iterable: Iterator[bytes],
+    iterable: Iterable[bytes],
     *,
     bar_type: str,
     size: int,
-) -> Iterator[bytes]:
+) -> Generator[bytes, None, None]:
     assert bar_type == "on", "This should only be used in the default mode."
 
     if not size:
@@ -315,7 +64,5 @@ def get_download_progress_renderer(
     """
     if bar_type == "on":
         return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
-    elif bar_type == "off":
-        return iter  # no-op, when passed an iterator
     else:
-        return _legacy_progress_bar(bar_type, size)
+        return iter  # no-op, when passed an iterator
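With the legacy bars deleted, the only rendering path left builds a rich progress display from explicit columns and advances it per downloaded chunk. A rough standalone analogue of _rich_progress_bar for the known-size case, using the same vendored rich modules (the chunks are dummy data):

    import time
    from pipenv.patched.notpip._vendor.rich.progress import (
        BarColumn,
        DownloadColumn,
        Progress,
        TextColumn,
        TimeRemainingColumn,
        TransferSpeedColumn,
    )

    def rich_progress_bar(iterable, *, size):
        # Known total: assemble bar/size/speed/ETA columns and advance the
        # task by the length of each yielded chunk.
        progress = Progress(
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
            TimeRemainingColumn(),
            refresh_per_second=30,
        )
        task_id = progress.add_task(" " * 2, total=size)
        with progress:
            for chunk in iterable:
                yield chunk
                progress.update(task_id, advance=len(chunk))

    chunks = [b"\0" * 1024] * 64  # dummy download data
    for _ in rich_progress_bar(iter(chunks), size=64 * 1024):
        time.sleep(0.01)  # stand-in for network latency

Because the renderer is a generator wrapped around the chunk iterator, the bar advances exactly in step with consumption and needs none of the SIGINT/colorama bookkeeping the deleted mixins carried.
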
diff --git a/pipenv/patched/notpip/_internal/cli/req_command.py b/pipenv/patched/notpip/_internal/cli/req_command.py
index 366829ec04..58545d271d 100644
--- a/pipenv/patched/notpip/_internal/cli/req_command.py
+++ b/pipenv/patched/notpip/_internal/cli/req_command.py
@@ -22,6 +22,7 @@
 from pipenv.patched.notpip._internal.models.selection_prefs import SelectionPreferences
 from pipenv.patched.notpip._internal.models.target_python import TargetPython
 from pipenv.patched.notpip._internal.network.session import PipSession
+from pipenv.patched.notpip._internal.operations.build.build_tracker import BuildTracker
 from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer
 from pipenv.patched.notpip._internal.req.constructors import (
     install_req_from_editable,
@@ -31,7 +32,6 @@
 )
 from pipenv.patched.notpip._internal.req.req_file import parse_requirements
 from pipenv.patched.notpip._internal.req.req_install import InstallRequirement
-from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker
 from pipenv.patched.notpip._internal.resolution.base import BaseResolver
 from pipenv.patched.notpip._internal.self_outdated_check import pip_self_version_check
 from pipenv.patched.notpip._internal.utils.deprecation import deprecated
@@ -63,6 +63,9 @@ def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
         url = getattr(options, "index_url", None)
         if url:
             index_urls.append(url)
+        urls = getattr(options, "extra_index_urls", None)
+        if urls:
+            index_urls.extend(urls)
         # Return None rather than an empty list
         return index_urls or None
 
@@ -254,7 +257,7 @@ def make_requirement_preparer(
         cls,
         temp_build_dir: TempDirectory,
         options: Values,
-        req_tracker: RequirementTracker,
+        build_tracker: BuildTracker,
         session: PipSession,
         finder: PackageFinder,
         use_user_site: bool,
@@ -285,33 +288,13 @@ def make_requirement_preparer(
                 "fast-deps has no effect when used with the legacy resolver."
             )
 
-        in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled
-        if "in-tree-build" in options.features_enabled:
-            deprecated(
-                reason="In-tree builds are now the default.",
-                replacement="to remove the --use-feature=in-tree-build flag",
-                gone_in="22.1",
-            )
-        if "out-of-tree-build" in options.deprecated_features_enabled:
-            deprecated(
-                reason="Out-of-tree builds are deprecated.",
-                replacement=None,
-                gone_in="22.1",
-            )
-
-        if options.progress_bar not in {"on", "off"}:
-            deprecated(
-                reason="Custom progress bar styles are deprecated",
-                replacement="to use the default progress bar style.",
-                gone_in="22.1",
-            )
-
         return RequirementPreparer(
             build_dir=temp_build_dir_path,
             src_dir=options.src_dir,
             download_dir=download_dir,
             build_isolation=options.build_isolation,
-            req_tracker=req_tracker,
+            check_build_deps=options.check_build_deps,
+            build_tracker=build_tracker,
             session=session,
             progress_bar=options.progress_bar,
             finder=finder,
@@ -319,7 +302,6 @@ def make_requirement_preparer(
             use_user_site=use_user_site,
             lazy_wheel=lazy_wheel,
             verbosity=verbosity,
-            in_tree_build=in_tree_build,
         )
 
     @classmethod
@@ -344,6 +326,7 @@ def make_resolver(
             install_req_from_req_string,
             isolated=options.isolated_mode,
             use_pep517=use_pep517,
+            config_settings=getattr(options, "config_settings", None),
         )
         suppress_build_failures = cls.determine_build_failure_suppression(options)
         resolver_variant = cls.determine_resolver_variant(options)
@@ -416,6 +399,7 @@ def get_requirements(
                 isolated=options.isolated_mode,
                 use_pep517=options.use_pep517,
                 user_supplied=True,
+                config_settings=getattr(options, "config_settings", None),
             )
             requirements.append(req_to_add)
 
@@ -425,6 +409,7 @@ def get_requirements(
                 user_supplied=True,
                 isolated=options.isolated_mode,
                 use_pep517=options.use_pep517,
+                config_settings=getattr(options, "config_settings", None),
             )
             requirements.append(req_to_add)
diff --git a/pipenv/patched/notpip/_internal/cli/spinners.py b/pipenv/patched/notpip/_internal/cli/spinners.py
index 84f26c5db2..a07c05f6e5 100644
--- a/pipenv/patched/notpip/_internal/cli/spinners.py
+++ b/pipenv/patched/notpip/_internal/cli/spinners.py
@@ -3,9 +3,7 @@
 import logging
 import sys
 import time
-from typing import IO, Iterator
-
-from pipenv.patched.notpip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
+from typing import IO, Generator
 
 from pipenv.patched.notpip._internal.utils.compat import WINDOWS
 from pipenv.patched.notpip._internal.utils.logging import get_indentation
@@ -115,7 +113,7 @@ def reset(self) -> None:
 
 
 @contextlib.contextmanager
-def open_spinner(message: str) -> Iterator[SpinnerInterface]:
+def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
     # Interactive spinner goes directly to sys.stdout rather than being routed
     # through the logging system, but it acts like it has level INFO,
     # i.e. it's only displayed if we're at level INFO or better.
@@ -138,8 +136,12 @@ def open_spinner(message: str) -> Iterator[SpinnerInterface]:
         spinner.finish("done")
 
 
+HIDE_CURSOR = "\x1b[?25l"
+SHOW_CURSOR = "\x1b[?25h"
+
+
 @contextlib.contextmanager
-def hidden_cursor(file: IO[str]) -> Iterator[None]:
+def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
     # The Windows terminal does not support the hide/show cursor ANSI codes,
     # even via colorama. So don't even try.
     if WINDOWS:
diff --git a/pipenv/patched/notpip/_internal/commands/completion.py b/pipenv/patched/notpip/_internal/commands/completion.py
index dac731e530..685326f2ef 100644
--- a/pipenv/patched/notpip/_internal/commands/completion.py
+++ b/pipenv/patched/notpip/_internal/commands/completion.py
@@ -43,6 +43,28 @@
         end
     complete -fa "(__fish_complete_pip)" -c {prog}
     """,
+    "powershell": """
+        if ((Test-Path Function:\\TabExpansion) -and -not `
+            (Test-Path Function:\\_pip_completeBackup)) {{
+            Rename-Item Function:\\TabExpansion _pip_completeBackup
+        }}
+        function TabExpansion($line, $lastWord) {{
+            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
+            if ($lastBlock.StartsWith("{prog} ")) {{
+                $Env:COMP_WORDS=$lastBlock
+                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
+                $Env:PIP_AUTO_COMPLETE=1
+                (& {prog}).Split()
+                Remove-Item Env:COMP_WORDS
+                Remove-Item Env:COMP_CWORD
+                Remove-Item Env:PIP_AUTO_COMPLETE
+            }}
+            elseif (Test-Path Function:\\_pip_completeBackup) {{
+                # Fall back on existing tab expansion
+                _pip_completeBackup $line $lastWord
+            }}
+        }}
+    """,
 }
 
 
@@ -76,6 +98,14 @@ def add_options(self) -> None:
             dest="shell",
             help="Emit completion code for fish",
         )
+        self.cmd_opts.add_option(
+            "--powershell",
+            "-p",
+            action="store_const",
+            const="powershell",
+            dest="shell",
+            help="Emit completion code for powershell",
+        )
 
         self.parser.insert_option_group(0, self.cmd_opts)
diff --git a/pipenv/patched/notpip/_internal/commands/configuration.py b/pipenv/patched/notpip/_internal/commands/configuration.py
index 0f15e54073..28f23a4194 100644
--- a/pipenv/patched/notpip/_internal/commands/configuration.py
+++ b/pipenv/patched/notpip/_internal/commands/configuration.py
@@ -27,11 +27,17 @@ class ConfigurationCommand(Command):
 
     - list: List the active configuration (or from the file specified)
     - edit: Edit the configuration file in an editor
-    - get: Get the value associated with name
-    - set: Set the name=value
-    - unset: Unset the value associated with name
+    - get: Get the value associated with command.option
+    - set: Set the command.option=value
+    - unset: Unset the value associated with command.option
     - debug: List the configuration files and values defined under them
 
+    Configuration keys should be dot separated command and option name,
+    with the special prefix "global" affecting any command. For example,
+    "pip config set global.index-url https://example.org/" would configure
+    the index url for all commands, but "pip config set download.timeout 10"
+    would configure a 10 second timeout only for "pip download" commands.
+
     If none of --user, --global and --site are passed, a virtual
     environment configuration file is used if one is active and the file
     exists. Otherwise, all modifications happen to the user file by
@@ -43,9 +49,9 @@ class ConfigurationCommand(Command):
     %prog [<file-option>] list
     %prog [<file-option>] [--editor <editor-path>] edit
 
-    %prog [<file-option>] get name
-    %prog [<file-option>] set name value
-    %prog [<file-option>] unset name
+    %prog [<file-option>] get command.option
+    %prog [<file-option>] set command.option value
+    %prog [<file-option>] unset command.option
     %prog [<file-option>] debug
     """
 
@@ -225,6 +231,10 @@ def open_in_editor(self, options: Values, args: List[str]) -> None:
 
         try:
             subprocess.check_call([editor, fname])
+        except FileNotFoundError as e:
+            if not e.filename:
+                e.filename = editor
+            raise
         except subprocess.CalledProcessError as e:
             raise PipError(
                 "Editor Subprocess exited with exit code {}".format(e.returncode)
diff --git a/pipenv/patched/notpip/_internal/commands/debug.py b/pipenv/patched/notpip/_internal/commands/debug.py
index 20e9e1fd5e..966dafa4f4 100644
--- a/pipenv/patched/notpip/_internal/commands/debug.py
+++ b/pipenv/patched/notpip/_internal/commands/debug.py
@@ -47,7 +47,7 @@ def create_vendor_txt_map() -> Dict[str, str]:
     ]
 
     # Transform into "module" -> version dict.
-    return dict(line.split("==", 1) for line in lines)  # type: ignore
+    return dict(line.split("==", 1) for line in lines)
 
 
 def get_module_from_module_name(module_name: str) -> ModuleType:
@@ -67,6 +67,7 @@ def get_vendor_version_from_module(module_name: str) -> Optional[str]:
 
     if not version:
         # Try to find version in debundled module info.
+        assert module.__file__ is not None
         env = get_environment([os.path.dirname(module.__file__)])
         dist = env.get_distribution(module_name)
         if dist:
diff --git a/pipenv/patched/notpip/_internal/commands/download.py b/pipenv/patched/notpip/_internal/commands/download.py
index 2025bc784f..442094e6d6 100644
--- a/pipenv/patched/notpip/_internal/commands/download.py
+++ b/pipenv/patched/notpip/_internal/commands/download.py
@@ -7,7 +7,7 @@
 from pipenv.patched.notpip._internal.cli.cmdoptions import make_target_python
 from pipenv.patched.notpip._internal.cli.req_command import RequirementCommand, with_cleanup
 from pipenv.patched.notpip._internal.cli.status_codes import SUCCESS
-from pipenv.patched.notpip._internal.req.req_tracker import get_requirement_tracker
+from pipenv.patched.notpip._internal.operations.build.build_tracker import get_build_tracker
 from pipenv.patched.notpip._internal.utils.misc import ensure_dir, normalize_path, write_output
 from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory
 
@@ -49,6 +49,7 @@ def add_options(self) -> None:
         self.cmd_opts.add_option(cmdoptions.no_build_isolation())
         self.cmd_opts.add_option(cmdoptions.use_pep517())
         self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
         self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
 
         self.cmd_opts.add_option(
@@ -95,7 +96,7 @@ def run(self, options: Values, args: List[str]) -> int:
             ignore_requires_python=options.ignore_requires_python,
         )
 
-        req_tracker = self.enter_context(get_requirement_tracker())
+        build_tracker = self.enter_context(get_build_tracker())
 
         directory = TempDirectory(
             delete=not options.no_clean,
@@ -108,7 +109,7 @@ def run(self, options: Values, args: List[str]) -> int:
         preparer = self.make_requirement_preparer(
             temp_build_dir=directory,
             options=options,
-            req_tracker=req_tracker,
+            build_tracker=build_tracker,
             session=session,
             finder=finder,
             download_dir=options.download_dir,
diff --git a/pipenv/patched/notpip/_internal/commands/install.py b/pipenv/patched/notpip/_internal/commands/install.py
index ec8a695a80..af96617a18 100644
--- a/pipenv/patched/notpip/_internal/commands/install.py
+++ b/pipenv/patched/notpip/_internal/commands/install.py
@@ -21,10 +21,10 @@
 from pipenv.patched.notpip._internal.locations import get_scheme
 from pipenv.patched.notpip._internal.metadata import get_environment
 from pipenv.patched.notpip._internal.models.format_control import FormatControl
+from pipenv.patched.notpip._internal.operations.build.build_tracker import get_build_tracker
 from pipenv.patched.notpip._internal.operations.check import ConflictDetails, check_install_conflicts
 from pipenv.patched.notpip._internal.req import install_given_reqs
 from pipenv.patched.notpip._internal.req.req_install import InstallRequirement
-from pipenv.patched.notpip._internal.req.req_tracker import get_requirement_tracker
 from pipenv.patched.notpip._internal.utils.compat import WINDOWS
 from pipenv.patched.notpip._internal.utils.distutils_args import parse_distutils_args
 from pipenv.patched.notpip._internal.utils.filesystem import test_writable_dir
@@ -189,7 +189,9 @@ def add_options(self) -> None:
         self.cmd_opts.add_option(cmdoptions.no_build_isolation())
         self.cmd_opts.add_option(cmdoptions.use_pep517())
         self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
 
+        self.cmd_opts.add_option(cmdoptions.config_settings())
         self.cmd_opts.add_option(cmdoptions.install_options())
         self.cmd_opts.add_option(cmdoptions.global_options())
 
@@ -222,12 +224,12 @@ def add_options(self) -> None:
             default=True,
             help="Do not warn about broken dependencies",
         )
-
         self.cmd_opts.add_option(cmdoptions.no_binary())
         self.cmd_opts.add_option(cmdoptions.only_binary())
         self.cmd_opts.add_option(cmdoptions.prefer_binary())
         self.cmd_opts.add_option(cmdoptions.require_hashes())
         self.cmd_opts.add_option(cmdoptions.progress_bar())
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
 
         index_opts = cmdoptions.make_option_group(
             cmdoptions.index_group,
@@ -293,7 +295,7 @@ def run(self, options: Values, args: List[str]) -> int:
         )
         wheel_cache = WheelCache(options.cache_dir, options.format_control)
 
-        req_tracker = self.enter_context(get_requirement_tracker())
+        build_tracker = self.enter_context(get_build_tracker())
 
         directory = TempDirectory(
             delete=not options.no_clean,
@@ -315,7 +317,7 @@ def run(self, options: Values, args: List[str]) -> int:
             preparer = self.make_requirement_preparer(
                 temp_build_dir=directory,
                 options=options,
-                req_tracker=req_tracker,
+                build_tracker=build_tracker,
                 session=session,
                 finder=finder,
                 use_user_site=options.use_user_site,
@@ -464,8 +466,8 @@ def run(self, options: Values, args: List[str]) -> int:
             self._handle_target_dir(
                 options.target_dir, target_temp_dir, options.upgrade
             )
-
-        warn_if_run_as_root()
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
         return SUCCESS
 
     def _handle_target_dir(
diff --git a/pipenv/patched/notpip/_internal/commands/list.py b/pipenv/patched/notpip/_internal/commands/list.py
index 6ec466bd19..9dc35dbe06 100644
--- a/pipenv/patched/notpip/_internal/commands/list.py
+++ b/pipenv/patched/notpip/_internal/commands/list.py
@@ -1,7 +1,7 @@
 import json
 import logging
 from optparse import Values
-from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast
+from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
 
 from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
 
@@ -222,7 +222,7 @@ def get_not_required(
 
     def iter_packages_latest_infos(
         self, packages: "_ProcessedDists", options: Values
-    ) -> Iterator["_DistWithLatestInfo"]:
+    ) -> Generator["_DistWithLatestInfo", None, None]:
         with self._build_session(options) as session:
             finder = self._build_package_finder(options, session)
diff --git a/pipenv/patched/notpip/_internal/commands/show.py b/pipenv/patched/notpip/_internal/commands/show.py
index ed99966e2f..8eaf614a52 100644
--- a/pipenv/patched/notpip/_internal/commands/show.py
+++ b/pipenv/patched/notpip/_internal/commands/show.py
@@ -1,6 +1,6 @@
 import logging
 from optparse import Values
-from typing import Iterator, List, NamedTuple, Optional
+from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
 
 from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
 
@@ -60,6 +60,7 @@ class _PackageInfo(NamedTuple):
     classifiers: List[str]
     summary: str
     homepage: str
+    project_urls: List[str]
     author: str
     author_email: str
     license: str
@@ -67,7 +68,7 @@ class _PackageInfo(NamedTuple):
     files: Optional[List[str]]
 
 
-def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
+def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
     """
     Gather details from installed distributions. Print distribution name,
     version, location, and installed files. Installed files requires a
@@ -76,7 +77,7 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
     """
     env = get_default_environment()
 
-    installed = {dist.canonical_name: dist for dist in env.iter_distributions()}
+    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
     query_names = [canonicalize_name(name) for name in query]
     missing = sorted(
         [name for name, pkg in zip(query, query_names) if pkg not in installed]
@@ -126,6 +127,7 @@ def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
             classifiers=metadata.get_all("Classifier", []),
             summary=metadata.get("Summary", ""),
             homepage=metadata.get("Home-page", ""),
+            project_urls=metadata.get_all("Project-URL", []),
             author=metadata.get("Author", ""),
             author_email=metadata.get("Author-email", ""),
             license=metadata.get("License", ""),
@@ -135,7 +137,7 @@ def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
 
 
 def print_results(
-    distributions: Iterator[_PackageInfo],
+    distributions: Iterable[_PackageInfo],
     list_files: bool,
     verbose: bool,
 ) -> bool:
@@ -168,6 +170,9 @@ def print_results(
             write_output("Entry-points:")
             for entry in dist.entry_points:
                 write_output("  %s", entry.strip())
+            write_output("Project-URLs:")
+            for project_url in dist.project_urls:
+                write_output("  %s", project_url)
         if list_files:
             write_output("Files:")
             if dist.files is None:
diff --git a/pipenv/patched/notpip/_internal/commands/uninstall.py b/pipenv/patched/notpip/_internal/commands/uninstall.py
index 2b67216fcd..8d39ea5de2 100644
--- a/pipenv/patched/notpip/_internal/commands/uninstall.py
+++ b/pipenv/patched/notpip/_internal/commands/uninstall.py
@@ -4,6 +4,7 @@
 
 from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
 
+from pipenv.patched.notpip._internal.cli import cmdoptions
 from pipenv.patched.notpip._internal.cli.base_command import Command
 from pipenv.patched.notpip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
 from pipenv.patched.notpip._internal.cli.status_codes import SUCCESS
@@ -53,7 +54,7 @@ def add_options(self) -> None:
             action="store_true",
             help="Don't ask for confirmation of uninstall deletions.",
         )
-
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
         self.parser.insert_option_group(0, self.cmd_opts)
 
     def run(self, options: Values, args: List[str]) -> int:
@@ -100,6 +101,6 @@ def run(self, options: Values, args: List[str]) -> int:
             )
             if uninstall_pathset:
                 uninstall_pathset.commit()
-
-        warn_if_run_as_root()
+        if options.root_user_action == "warn":
+            warn_if_run_as_root()
         return SUCCESS
diff --git a/pipenv/patched/notpip/_internal/commands/wheel.py b/pipenv/patched/notpip/_internal/commands/wheel.py
index 3962a371ce..9086fe720b 100644
--- a/pipenv/patched/notpip/_internal/commands/wheel.py
+++ b/pipenv/patched/notpip/_internal/commands/wheel.py
@@ -9,8 +9,8 @@
 from pipenv.patched.notpip._internal.cli.req_command import RequirementCommand, with_cleanup
 from pipenv.patched.notpip._internal.cli.status_codes import SUCCESS
 from pipenv.patched.notpip._internal.exceptions import CommandError
+from pipenv.patched.notpip._internal.operations.build.build_tracker import get_build_tracker
 from pipenv.patched.notpip._internal.req.req_install import InstallRequirement
-from pipenv.patched.notpip._internal.req.req_tracker import get_requirement_tracker
 from pipenv.patched.notpip._internal.utils.misc import ensure_dir, normalize_path
 from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory
 from pipenv.patched.notpip._internal.wheel_builder import build, should_build_for_wheel_command
@@ -26,10 +26,8 @@ class WheelCommand(RequirementCommand):
     recompiling your software during every install. For more details, see the
     wheel docs: https://wheel.readthedocs.io/en/latest/
 
-    Requirements: setuptools>=0.8, and wheel.
-
-    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
-    package to build individual wheels.
+    'pip wheel' uses the build system interface as described here:
+    https://pip.pypa.io/en/stable/reference/build-system/
 
     """
 
@@ -59,6 +57,7 @@ def add_options(self) -> None:
         self.cmd_opts.add_option(cmdoptions.no_build_isolation())
         self.cmd_opts.add_option(cmdoptions.use_pep517())
         self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
         self.cmd_opts.add_option(cmdoptions.constraints())
         self.cmd_opts.add_option(cmdoptions.editable())
         self.cmd_opts.add_option(cmdoptions.requirements())
@@ -75,6 +74,7 @@ def add_options(self) -> None:
             help="Don't verify if built wheel is valid.",
         )
 
+        self.cmd_opts.add_option(cmdoptions.config_settings())
         self.cmd_opts.add_option(cmdoptions.build_options())
         self.cmd_opts.add_option(cmdoptions.global_options())
 
@@ -110,7 +110,7 @@ def run(self, options: Values, args: List[str]) -> int:
         options.wheel_dir = normalize_path(options.wheel_dir)
         ensure_dir(options.wheel_dir)
 
-        req_tracker = self.enter_context(get_requirement_tracker())
+        build_tracker = self.enter_context(get_build_tracker())
 
         directory = TempDirectory(
             delete=not options.no_clean,
@@ -123,7 +123,7 @@ def run(self, options: Values, args: List[str]) -> int:
         preparer = self.make_requirement_preparer(
             temp_build_dir=directory,
             options=options,
-            req_tracker=req_tracker,
+            build_tracker=build_tracker,
             session=session,
             finder=finder,
             download_dir=options.wheel_dir,
diff --git a/pipenv/patched/notpip/_internal/configuration.py b/pipenv/patched/notpip/_internal/configuration.py
index 2a40f65415..4b836ca118 100644
--- a/pipenv/patched/notpip/_internal/configuration.py
+++ b/pipenv/patched/notpip/_internal/configuration.py
@@ -142,13 +142,19 @@ def items(self) -> Iterable[Tuple[str, Any]]:
 
     def get_value(self, key: str) -> Any:
         """Get a value from the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
         try:
             return self._dictionary[key]
         except KeyError:
-            raise ConfigurationError(f"No such key - {key}")
+            # disassembling triggers a more useful error message than simply
+            # "No such key" in the case that the key isn't in the form command.option
+            _disassemble_key(key)
+            raise ConfigurationError(f"No such key - {orig_key}")
 
     def set_value(self, key: str, value: Any) -> None:
         """Modify a value in the configuration."""
+        key = _normalize_name(key)
         self._ensure_have_load_only()
 
         assert self.load_only
@@ -167,11 +173,13 @@ def set_value(self, key: str, value: Any) -> None:
 
     def unset_value(self, key: str) -> None:
         """Unset a value in the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
         self._ensure_have_load_only()
 
         assert self.load_only
         if key not in self._config[self.load_only]:
-            raise ConfigurationError(f"No such key - {key}")
+            raise ConfigurationError(f"No such key - {orig_key}")
 
         fname, parser = self._get_parser_to_modify()
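Since keys are now passed through _normalize_name() before lookup, differently spelled keys reach the same entry, and failed lookups echo the key exactly as the user typed it. A short sketch against the patched Configuration class (which values exist depends on the local pip.ini/pip.conf):

    from pipenv.patched.notpip._internal.configuration import Configuration
    from pipenv.patched.notpip._internal.exceptions import ConfigurationError

    conf = Configuration(isolated=False)
    conf.load()

    # "global.INDEX_URL" normalizes to "global.index-url", so both spellings
    # resolve to the same entry. A key without the command.option dot now
    # fails inside _disassemble_key() with a clearer message than the old
    # bare "No such key".
    for key in ("global.index-url", "global.INDEX_URL", "index-url"):
        try:
            print(key, "->", conf.get_value(key))
        except ConfigurationError as exc:
            print(key, "->", exc)
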
diff --git a/pipenv/patched/notpip/_internal/distributions/base.py b/pipenv/patched/notpip/_internal/distributions/base.py
index 3dbc7f2363..c5ea712601 100644
--- a/pipenv/patched/notpip/_internal/distributions/base.py
+++ b/pipenv/patched/notpip/_internal/distributions/base.py
@@ -31,6 +31,9 @@ def get_metadata_distribution(self) -> BaseDistribution:
 
     @abc.abstractmethod
     def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
     ) -> None:
         raise NotImplementedError()
diff --git a/pipenv/patched/notpip/_internal/distributions/installed.py b/pipenv/patched/notpip/_internal/distributions/installed.py
index b7184c6368..43fdcdebb0 100644
--- a/pipenv/patched/notpip/_internal/distributions/installed.py
+++ b/pipenv/patched/notpip/_internal/distributions/installed.py
@@ -15,6 +15,9 @@ def get_metadata_distribution(self) -> BaseDistribution:
         return self.req.satisfied_by
 
     def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
     ) -> None:
         pass
diff --git a/pipenv/patched/notpip/_internal/distributions/sdist.py b/pipenv/patched/notpip/_internal/distributions/sdist.py
index d2add10ba4..bd80087f15 100644
--- a/pipenv/patched/notpip/_internal/distributions/sdist.py
+++ b/pipenv/patched/notpip/_internal/distributions/sdist.py
@@ -22,7 +22,10 @@ def get_metadata_distribution(self) -> BaseDistribution:
         return self.req.get_dist()
 
     def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
     ) -> None:
         # Load pyproject.toml, to determine whether PEP 517 is to be used
         self.req.load_pyproject_toml()
@@ -43,7 +46,18 @@ def prepare_distribution_metadata(
                 self.req.isolated_editable_sanity_check()
             # Install the dynamic build requirements.
             self._install_build_reqs(finder)
-
+        # Check if the current environment provides build dependencies
+        should_check_deps = self.req.use_pep517 and check_build_deps
+        if should_check_deps:
+            pyproject_requires = self.req.pyproject_requires
+            assert pyproject_requires is not None
+            conflicting, missing = self.req.build_env.check_requirements(
+                pyproject_requires
+            )
+            if conflicting:
+                self._raise_conflicts("the backend dependencies", conflicting)
+            if missing:
+                self._raise_missing_reqs(missing)
         self.req.prepare_metadata()
 
     def _prepare_build_backend(self, finder: PackageFinder) -> None:
@@ -125,3 +139,12 @@ def _raise_conflicts(
             ),
         )
         raise InstallationError(error_message)
+
+    def _raise_missing_reqs(self, missing: Set[str]) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} are missing: {missing}."
+        )
+        error_message = format_string.format(
+            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
+        )
+        raise InstallationError(error_message)
diff --git a/pipenv/patched/notpip/_internal/distributions/wheel.py b/pipenv/patched/notpip/_internal/distributions/wheel.py
index 7b5e17a2a0..9f2ec06ff7 100644
--- a/pipenv/patched/notpip/_internal/distributions/wheel.py
+++ b/pipenv/patched/notpip/_internal/distributions/wheel.py
@@ -26,6 +26,9 @@ def get_metadata_distribution(self) -> BaseDistribution:
         return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
 
     def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
     ) -> None:
         pass
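The new check_build_deps parameter is threaded through every AbstractDistribution subclass, even where it is unused, so any out-of-tree subclass has to grow the same signature. A minimal conforming subclass (hypothetical, purely for illustration):

    from pipenv.patched.notpip._internal.distributions.base import (
        AbstractDistribution,
    )
    from pipenv.patched.notpip._internal.index.package_finder import PackageFinder
    from pipenv.patched.notpip._internal.metadata.base import BaseDistribution

    class AlreadyBuiltDistribution(AbstractDistribution):
        """Hypothetical distribution whose metadata needs no preparation."""

        def get_metadata_distribution(self) -> BaseDistribution:
            return self.req.get_dist()

        def prepare_distribution_metadata(
            self,
            finder: PackageFinder,
            build_isolation: bool,
            # New in 22.1: carries --check-build-dependencies down from the
            # preparer; only the sdist path actually acts on it.
            check_build_deps: bool,
        ) -> None:
            pass
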
If a candidate is found, *detail* will be the + candidate's version string; if one is not found, it contains the + reason the link fails to qualify. """ version = None if link.is_yanked and not self._allow_yanked: reason = link.yanked_reason or "" - return (False, f"yanked for reason: {reason}") + return (LinkType.yanked, f"yanked for reason: {reason}") if link.egg_fragment: egg_info = link.egg_fragment @@ -169,42 +179,46 @@ def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: else: egg_info, ext = link.splitext() if not ext: - return (False, "not a file") + return (LinkType.format_unsupported, "not a file") if ext not in SUPPORTED_EXTENSIONS: - return (False, f"unsupported archive format: {ext}") + return ( + LinkType.format_unsupported, + f"unsupported archive format: {ext}", + ) if "binary" not in self._formats and ext == WHEEL_EXTENSION and not self._ignore_compatibility: - reason = "No binaries permitted for {}".format(self.project_name) - return (False, reason) - if "macosx10" in link.path and ext == '.zip' and not self._ignore_compatibility: - return (False, "macosx10 one") + reason = f"No binaries permitted for {self.project_name}" + return (LinkType.format_unsupported, reason) + if "macosx10" in link.path and ext == ".zip" and not self._ignore_compatibility: + return (LinkType.format_unsupported, "macosx10 one") if ext == WHEEL_EXTENSION: try: wheel = Wheel(link.filename) except InvalidWheelFilename: - return (False, "invalid wheel filename") + return ( + LinkType.format_invalid, + "invalid wheel filename", + ) if canonicalize_name(wheel.name) != self._canonical_name: - reason = "wrong project name (not {})".format(self.project_name) - return (False, reason) + reason = f"wrong project name (not {self.project_name})" + return (LinkType.different_project, reason) supported_tags = self._target_python.get_tags() if not wheel.supported(supported_tags) and not self._ignore_compatibility: # Include the wheel's tags in the reason string to # simplify troubleshooting compatibility issues. - file_tags = wheel.get_formatted_file_tags() + file_tags = ", ".join(wheel.get_formatted_file_tags()) reason = ( - "none of the wheel's tags ({}) are compatible " - "(run pip debug --verbose to show compatible tags)".format( - ", ".join(file_tags) - ) + f"none of the wheel's tags ({file_tags}) are compatible " + f"(run pip debug --verbose to show compatible tags)" ) - return (False, reason) + return (LinkType.platform_mismatch, reason) version = wheel.version # This should be up by the self.ok_binary check, but see issue 2700. 
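For reference, a self-contained sketch of the new evaluate_link() contract (toy stand-ins, not the vendored classes): callers now branch on a LinkType enum instead of unpacking an (is_candidate, Optional[str]) pair, so every rejection carries a loggable reason:

    import enum
    from typing import Tuple

    class LinkType(enum.Enum):
        candidate = enum.auto()
        yanked = enum.auto()
        format_unsupported = enum.auto()

    def evaluate(filename: str) -> Tuple[LinkType, str]:
        # Toy stand-in for LinkEvaluator.evaluate_link().
        if not filename.endswith((".whl", ".tar.gz", ".zip")):
            return (LinkType.format_unsupported, "not a supported archive")
        return (LinkType.candidate, "22.1.2")  # detail is the version on success

    result, detail = evaluate("pip-22.1.2-py3-none-any.whl")
    if result != LinkType.candidate:
        print("Skipping link:", detail)
    else:
        print("Found version:", detail)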
if "source" not in self._formats and ext != WHEEL_EXTENSION: reason = f"No sources permitted for {self.project_name}" - return (False, reason) + return (LinkType.format_unsupported, reason) if not version: version = _extract_version_from_fragment( @@ -213,14 +227,17 @@ def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: ) if not version: reason = f"Missing project version for {self.project_name}" - return (False, reason) + return (LinkType.format_invalid, reason) match = self._py_version_re.search(version) if match: version = version[: match.start()] py_version = match.group(1) if py_version != self._target_python.py_version: - return (False, "Python version is incorrect") + return ( + LinkType.platform_mismatch, + "Python version is incorrect", + ) supports_python = _check_link_requires_python( link, @@ -228,13 +245,12 @@ def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: ignore_requires_python=self._ignore_requires_python, ) if not supports_python and not self._ignore_compatibility: - # Return None for the reason text to suppress calling - # _log_skipped_link(). - return (False, None) + reason = f"{version} Requires-Python {link.requires_python}" + return (LinkType.requires_python_mismatch, reason) logger.debug("Found link %s, version: %s", link, version) - return (True, version) + return (LinkType.candidate, version) def filter_unallowed_hashes( @@ -624,7 +640,7 @@ def __init__( self.format_control = format_control # These are boring links that have already been logged somehow. - self._logged_links: Set[Link] = set() + self._logged_links: Set[Tuple[Link, LinkType, str]] = set() # Don't include an allow_yanked default value to make sure each call # site considers whether yanked releases are allowed. This also causes @@ -704,6 +720,14 @@ def prefer_binary(self) -> bool: def set_prefer_binary(self) -> None: self._candidate_prefs.prefer_binary = True + def requires_python_skipped_reasons(self) -> List[str]: + reasons = { + detail + for _, result, detail in self._logged_links + if result == LinkType.requires_python_mismatch + } + return sorted(reasons) + def make_link_evaluator(self, project_name: str) -> LinkEvaluator: canonical_name = canonicalize_name(project_name) formats = self.format_control.get_allowed_formats(canonical_name) @@ -734,12 +758,13 @@ def _sort_links(self, links: Iterable[Link]) -> List[Link]: no_eggs.append(link) return no_eggs + eggs - def _log_skipped_link(self, link: Link, reason: str) -> None: - if link not in self._logged_links: + def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None: + entry = (link, result, detail) + if entry not in self._logged_links: # Put the link at the end so the reason is more visible and because # the link string is usually very long. - logger.debug("Skipping link: %s: %s", reason, link) - self._logged_links.add(link) + logger.debug("Skipping link: %s: %s", detail, link) + self._logged_links.add(entry) def get_install_candidate( self, link_evaluator: LinkEvaluator, link: Link @@ -748,16 +773,15 @@ def get_install_candidate( If the link is a candidate for install, convert it to an InstallationCandidate and return it. Otherwise, return None. 
""" - is_candidate, result = link_evaluator.evaluate_link(link) - if not is_candidate: - if result: - self._log_skipped_link(link, reason=result) + result, detail = link_evaluator.evaluate_link(link) + if result != LinkType.candidate: + self._log_skipped_link(link, result, detail) return None return InstallationCandidate( name=link_evaluator.project_name, link=link, - version=result, + version=detail, ) def evaluate_links( diff --git a/pipenv/patched/notpip/_internal/locations/__init__.py b/pipenv/patched/notpip/_internal/locations/__init__.py index 73eb9bafb0..bc52ec8964 100644 --- a/pipenv/patched/notpip/_internal/locations/__init__.py +++ b/pipenv/patched/notpip/_internal/locations/__init__.py @@ -4,7 +4,7 @@ import pathlib import sys import sysconfig -from typing import Any, Dict, Iterator, List, Optional, Tuple +from typing import Any, Dict, Generator, List, Optional, Tuple from pipenv.patched.notpip._internal.models.scheme import SCHEME_KEYS, Scheme from pipenv.patched.notpip._internal.utils.compat import WINDOWS @@ -70,9 +70,10 @@ def _should_use_sysconfig() -> bool: def _looks_like_bpo_44860() -> bool: """The resolution to bpo-44860 will change this incorrect platlib. + See . """ - from distutils.command.install import INSTALL_SCHEMES # type: ignore + from distutils.command.install import INSTALL_SCHEMES try: unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"] @@ -97,7 +98,7 @@ def _looks_like_red_hat_lib() -> bool: This is the only way I can see to tell a Red Hat-patched Python. """ - from distutils.command.install import INSTALL_SCHEMES # type: ignore + from distutils.command.install import INSTALL_SCHEMES return all( k in INSTALL_SCHEMES @@ -109,7 +110,7 @@ def _looks_like_red_hat_lib() -> bool: @functools.lru_cache(maxsize=None) def _looks_like_debian_scheme() -> bool: """Debian adds two additional schemes.""" - from distutils.command.install import INSTALL_SCHEMES # type: ignore + from distutils.command.install import INSTALL_SCHEMES return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES @@ -137,6 +138,7 @@ def _looks_like_red_hat_scheme() -> bool: @functools.lru_cache(maxsize=None) def _looks_like_slackware_scheme() -> bool: """Slackware patches sysconfig but fails to patch distutils and site. + Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib path, but does not do the same to the site module. """ @@ -152,9 +154,11 @@ def _looks_like_slackware_scheme() -> bool: @functools.lru_cache(maxsize=None) def _looks_like_msys2_mingw_scheme() -> bool: """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. + However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is likely going to be included in their 3.10 release, so we ignore the warning. See msys2/MINGW-packages#9319. + MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, and is missing the final ``"site-packages"``. """ @@ -165,9 +169,9 @@ def _looks_like_msys2_mingw_scheme() -> bool: ) -def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]: +def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]: ldversion = sysconfig.get_config_var("LDVERSION") - abiflags: str = getattr(sys, "abiflags", None) + abiflags = getattr(sys, "abiflags", None) # LDVERSION does not end with sys.abiflags. Just return the path unchanged. 
    if not ldversion or not abiflags or not ldversion.endswith(abiflags):
diff --git a/pipenv/patched/notpip/_internal/locations/_distutils.py b/pipenv/patched/notpip/_internal/locations/_distutils.py
index f5481ea0c0..172ef8ac1c 100644
--- a/pipenv/patched/notpip/_internal/locations/_distutils.py
+++ b/pipenv/patched/notpip/_internal/locations/_distutils.py
@@ -84,7 +84,7 @@ def distutils_scheme(
     if home:
         prefix = home
     elif user:
-        prefix = i.install_userbase  # type: ignore
+        prefix = i.install_userbase
     else:
         prefix = i.prefix
     scheme["headers"] = os.path.join(
diff --git a/pipenv/patched/notpip/_internal/metadata/__init__.py b/pipenv/patched/notpip/_internal/metadata/__init__.py
index cc037c14f0..146db52d90 100644
--- a/pipenv/patched/notpip/_internal/metadata/__init__.py
+++ b/pipenv/patched/notpip/_internal/metadata/__init__.py
@@ -1,7 +1,18 @@
-from typing import List, Optional
+import contextlib
+import functools
+import os
+import sys
+from typing import TYPE_CHECKING, List, Optional, Type, cast
+
+from pipenv.patched.notpip._internal.utils.misc import strtobool
 
 from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
 
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
 __all__ = [
     "BaseDistribution",
     "BaseEnvironment",
@@ -11,9 +22,49 @@
     "get_default_environment",
     "get_environment",
     "get_wheel_distribution",
+    "select_backend",
 ]
 
 
+def _should_use_importlib_metadata() -> bool:
+    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
+
+    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
+    ``pkg_resources`` otherwise. This can be overridden in a couple of ways:
+
+    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
+      dictates whether ``importlib.metadata`` is used, regardless of Python
+      version.
+    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
+      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
+      makes pip use ``pkg_resources`` (unless the user set the aforementioned
+      environment variable to *True*).
+    """
+    with contextlib.suppress(KeyError, ValueError):
+        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
+    if sys.version_info < (3, 11):
+        return False
+    import importlib.metadata
+
+    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
+
+
+class Backend(Protocol):
+    Distribution: Type[BaseDistribution]
+    Environment: Type[BaseEnvironment]
+
+
+@functools.lru_cache(maxsize=None)
+def select_backend() -> Backend:
+    if _should_use_importlib_metadata():
+        from . import importlib
+
+        return cast(Backend, importlib)
+    from . import pkg_resources
+
+    return cast(Backend, pkg_resources)
+
+
 def get_default_environment() -> BaseEnvironment:
     """Get the default representation for the current environment.
 
@@ -21,9 +72,7 @@ def get_default_environment() -> BaseEnvironment:
     Environment instance should be built from ``sys.path`` and may use
     caching to share instance state across calls.
     """
-    from .pkg_resources import Environment
-
-    return Environment.default()
+    return select_backend().Environment.default()
 
 
 def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
@@ -33,9 +82,7 @@ def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
     given import paths. The backend must build a fresh instance representing
     the state of installed distributions when this function is called.
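As the docstring above describes, the backend choice can be forced through the _PIP_USE_IMPORTLIB_METADATA environment variable; its value is parsed by strtobool, so "0"/"false" and "1"/"true" both work. A sketch of exercising the override from a parent process (assuming pip is installed in the target interpreter):

    import os
    import subprocess
    import sys

    # Force the pkg_resources backend regardless of Python version.
    env = dict(os.environ, _PIP_USE_IMPORTLIB_METADATA="0")
    subprocess.run([sys.executable, "-m", "pip", "list"], env=env, check=True)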
""" - from .pkg_resources import Environment - - return Environment.from_paths(paths) + return select_backend().Environment.from_paths(paths) def get_directory_distribution(directory: str) -> BaseDistribution: @@ -44,9 +91,7 @@ def get_directory_distribution(directory: str) -> BaseDistribution: This returns a Distribution instance from the chosen backend based on the given on-disk ``.dist-info`` directory. """ - from .pkg_resources import Distribution - - return Distribution.from_directory(directory) + return select_backend().Distribution.from_directory(directory) def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution: @@ -57,6 +102,4 @@ def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistributio :param canonical_name: Normalized project name of the given wheel. """ - from .pkg_resources import Distribution - - return Distribution.from_wheel(wheel, canonical_name) + return select_backend().Distribution.from_wheel(wheel, canonical_name) diff --git a/pipenv/patched/notpip/_internal/metadata/base.py b/pipenv/patched/notpip/_internal/metadata/base.py index 1ff7c95113..cf901f4b18 100644 --- a/pipenv/patched/notpip/_internal/metadata/base.py +++ b/pipenv/patched/notpip/_internal/metadata/base.py @@ -31,10 +31,7 @@ DirectUrlValidationError, ) from pipenv.patched.notpip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. -from pipenv.patched.notpip._internal.utils.egg_link import ( - egg_link_path_from_location, - egg_link_path_from_sys_path, -) +from pipenv.patched.notpip._internal.utils.egg_link import egg_link_path_from_sys_path from pipenv.patched.notpip._internal.utils.misc import is_local, normalize_path from pipenv.patched.notpip._internal.utils.urls import url_to_path @@ -45,7 +42,7 @@ DistributionVersion = Union[LegacyVersion, Version] -InfoPath = Union[str, pathlib.PurePosixPath] +InfoPath = Union[str, pathlib.PurePath] logger = logging.getLogger(__name__) @@ -95,6 +92,28 @@ def _convert_installed_files_path( class BaseDistribution(Protocol): + @classmethod + def from_directory(cls, directory: str) -> "BaseDistribution": + """Load the distribution from a metadata directory. + + :param directory: Path to a metadata directory, e.g. ``.dist-info``. + """ + raise NotImplementedError() + + @classmethod + def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution": + """Load the distribution from a given wheel. + + :param wheel: A concrete wheel definition. + :param name: File name of the wheel. + + :raises InvalidWheel: Whenever loading of the wheel causes a + :py:exc:`zipfile.BadZipFile` exception to be thrown. + :raises UnsupportedWheel: If the wheel is a valid zip, but malformed + internally. + """ + raise NotImplementedError() + def __repr__(self) -> str: return f"{self.raw_name} {self.version} ({self.location})" @@ -148,14 +167,7 @@ def installed_location(self) -> Optional[str]: The returned location is normalized (in particular, with symlinks removed). """ - egg_link = egg_link_path_from_location(self.raw_name) - if egg_link: - location = egg_link - elif self.location: - location = self.location - else: - return None - return normalize_path(location) + raise NotImplementedError() @property def info_location(self) -> Optional[str]: @@ -316,21 +328,19 @@ def is_file(self, path: InfoPath) -> bool: """Check whether an entry in the info directory is a file.""" raise NotImplementedError() - def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: - """Iterate through a directory in the info directory. 
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        """Find distutils 'scripts' entries in the metadata.
 
-        Each item yielded would be a path relative to the info directory.
-
-        :raise FileNotFoundError: If ``name`` does not exist in the directory.
-        :raise NotADirectoryError: If ``name`` does not point to a directory.
+        If 'scripts' is supplied in ``setup.py``, distutils records those in the
+        installed distribution's ``scripts`` directory, a file for each script.
         """
         raise NotImplementedError()
 
     def read_text(self, path: InfoPath) -> str:
         """Read a file in the info directory.
 
-        :raise FileNotFoundError: If ``name`` does not exist in the directory.
-        :raise NoneMetadataError: If ``name`` exists in the info directory, but
+        :raise FileNotFoundError: If ``path`` does not exist in the directory.
+        :raise NoneMetadataError: If ``path`` exists in the info directory, but
             cannot be read.
         """
         raise NotImplementedError()
@@ -470,8 +480,8 @@ def _iter_distributions(self) -> Iterator["BaseDistribution"]:
         """
         raise NotImplementedError()
 
-    def iter_distributions(self) -> Iterator["BaseDistribution"]:
-        """Iterate through installed distributions."""
+    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
+        """Iterate through all installed distributions without any filtering."""
         for dist in self._iter_distributions():
             # Make sure the distribution actually comes from a valid Python
             # packaging distribution. Pip's AdjacentTempDirectory leaves folders
@@ -501,6 +511,11 @@ def iter_installed_distributions(
     ) -> Iterator[BaseDistribution]:
         """Return a list of installed distributions.
 
+        This is based on ``iter_all_distributions()`` with additional filtering
+        options. Note that ``iter_installed_distributions()`` without arguments
+        is *not* equal to ``iter_all_distributions()``, since some of the
+        configurations exclude packages by default.
+
         :param local_only: If True (default), only return installations local
             to the current virtualenv, if in a virtualenv.
         :param skip: An iterable of canonicalized project names to ignore;
@@ -510,7 +525,7 @@ def iter_installed_distributions(
         :param user_only: If True, only report installations in the user
             site directory.
         """
-        it = self.iter_distributions()
+        it = self.iter_all_distributions()
         if local_only:
             it = (d for d in it if d.local)
         if not include_editables:
diff --git a/pipenv/patched/notpip/_internal/metadata/importlib/__init__.py b/pipenv/patched/notpip/_internal/metadata/importlib/__init__.py
new file mode 100644
index 0000000000..5e7af9fe52
--- /dev/null
+++ b/pipenv/patched/notpip/_internal/metadata/importlib/__init__.py
@@ -0,0 +1,4 @@
+from ._dists import Distribution
+from ._envs import Environment
+
+__all__ = ["Distribution", "Environment"]
diff --git a/pipenv/patched/notpip/_internal/metadata/importlib/_compat.py b/pipenv/patched/notpip/_internal/metadata/importlib/_compat.py
new file mode 100644
index 0000000000..2bc6bfd27e
--- /dev/null
+++ b/pipenv/patched/notpip/_internal/metadata/importlib/_compat.py
@@ -0,0 +1,41 @@
+import importlib.metadata
+from typing import Any, Optional, Protocol, cast
+
+
+class BasePath(Protocol):
+    """A protocol that various path objects conform to.
+
+    This exists because importlib.metadata uses both ``pathlib.Path`` and
+    ``zipfile.Path``, and we need a common base for type hints (Union does not
+    work well since ``zipfile.Path`` is too new for our linter setup).
+
+    This is not meant to be exhaustive; it only contains things present in
+    both classes *that we need*.
+ """ + + name: str + + @property + def parent(self) -> "BasePath": + raise NotImplementedError() + + +def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]: + """Find the path to the distribution's metadata directory. + + HACK: This relies on importlib.metadata's private ``_path`` attribute. Not + all distributions exist on disk, so importlib.metadata is correct to not + expose the attribute as public. But pip's code base is old and not as clean, + so we do this to avoid having to rewrite too many things. Hopefully we can + eliminate this some day. + """ + return getattr(d, "_path", None) + + +def get_dist_name(dist: importlib.metadata.Distribution) -> str: + """Get the distribution's project name. + + The ``name`` attribute is only available in Python 3.10 or later. We are + targeting exactly that, but Mypy does not know this. + """ + return cast(Any, dist).name diff --git a/pipenv/patched/notpip/_internal/metadata/importlib/_dists.py b/pipenv/patched/notpip/_internal/metadata/importlib/_dists.py new file mode 100644 index 0000000000..b078eae215 --- /dev/null +++ b/pipenv/patched/notpip/_internal/metadata/importlib/_dists.py @@ -0,0 +1,274 @@ +import email.message +import importlib.metadata +import os +import pathlib +import zipfile +from typing import ( + Collection, + Dict, + Iterable, + Iterator, + Mapping, + NamedTuple, + Optional, + Sequence, +) + +from pipenv.patched.notpip._vendor.packaging.requirements import Requirement +from pipenv.patched.notpip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version + +from pipenv.patched.notpip._internal.exceptions import InvalidWheel, UnsupportedWheel +from pipenv.patched.notpip._internal.metadata.base import ( + BaseDistribution, + BaseEntryPoint, + DistributionVersion, + InfoPath, + Wheel, +) +from pipenv.patched.notpip._internal.utils.misc import normalize_path +from pipenv.patched.notpip._internal.utils.packaging import safe_extra +from pipenv.patched.notpip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file + +from ._compat import BasePath, get_dist_name + + +class WheelDistribution(importlib.metadata.Distribution): + """An ``importlib.metadata.Distribution`` read from a wheel. + + Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``, + its implementation is too "lazy" for pip's needs (we can't keep the ZipFile + handle open for the entire lifetime of the distribution object). + + This implementation eagerly reads the entire metadata directory into the + memory instead, and operates from that. + """ + + def __init__( + self, + files: Mapping[pathlib.PurePosixPath, bytes], + info_location: pathlib.PurePosixPath, + ) -> None: + self._files = files + self.info_location = info_location + + @classmethod + def from_zipfile( + cls, + zf: zipfile.ZipFile, + name: str, + location: str, + ) -> "WheelDistribution": + info_dir, _ = parse_wheel(zf, name) + paths = ( + (name, pathlib.PurePosixPath(name.split("/", 1)[-1])) + for name in zf.namelist() + if name.startswith(f"{info_dir}/") + ) + files = { + relpath: read_wheel_metadata_file(zf, fullpath) + for fullpath, relpath in paths + } + info_location = pathlib.PurePosixPath(location, info_dir) + return cls(files, info_location) + + def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: + # Only allow iterating through the metadata directory. 
+        if pathlib.PurePosixPath(str(path)) in self._files:
+            return iter(self._files)
+        raise FileNotFoundError(path)
+
+    def read_text(self, filename: str) -> Optional[str]:
+        try:
+            data = self._files[pathlib.PurePosixPath(filename)]
+        except KeyError:
+            return None
+        try:
+            text = data.decode("utf-8")
+        except UnicodeDecodeError as e:
+            wheel = self.info_location.parent
+            error = f"Error decoding metadata for {wheel}: {e} in {filename} file"
+            raise UnsupportedWheel(error)
+        return text
+
+
+class RequiresEntry(NamedTuple):
+    requirement: str
+    extra: str
+    marker: str
+
+
+class Distribution(BaseDistribution):
+    def __init__(
+        self,
+        dist: importlib.metadata.Distribution,
+        info_location: Optional[BasePath],
+        installed_location: Optional[BasePath],
+    ) -> None:
+        self._dist = dist
+        self._info_location = info_location
+        self._installed_location = installed_location
+
+    @classmethod
+    def from_directory(cls, directory: str) -> BaseDistribution:
+        info_location = pathlib.Path(directory)
+        dist = importlib.metadata.Distribution.at(info_location)
+        return cls(dist, info_location, info_location.parent)
+
+    @classmethod
+    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
+        try:
+            with wheel.as_zipfile() as zf:
+                dist = WheelDistribution.from_zipfile(zf, name, wheel.location)
+        except zipfile.BadZipFile as e:
+            raise InvalidWheel(wheel.location, name) from e
+        except UnsupportedWheel as e:
+            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+        return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))
+
+    @property
+    def location(self) -> Optional[str]:
+        if self._info_location is None:
+            return None
+        return str(self._info_location.parent)
+
+    @property
+    def info_location(self) -> Optional[str]:
+        if self._info_location is None:
+            return None
+        return str(self._info_location)
+
+    @property
+    def installed_location(self) -> Optional[str]:
+        if self._installed_location is None:
+            return None
+        return normalize_path(str(self._installed_location))
+
+    def _get_dist_name_from_location(self) -> Optional[str]:
+        """Try to get the name from the metadata directory name.
+
+        This is much faster than reading metadata.
+        """
+        if self._info_location is None:
+            return None
+        stem, suffix = os.path.splitext(self._info_location.name)
+        if suffix not in (".dist-info", ".egg-info"):
+            return None
+        return stem.split("-", 1)[0]
+
+    @property
+    def canonical_name(self) -> NormalizedName:
+        name = self._get_dist_name_from_location() or get_dist_name(self._dist)
+        return canonicalize_name(name)
+
+    @property
+    def version(self) -> DistributionVersion:
+        return parse_version(self._dist.version)
+
+    def is_file(self, path: InfoPath) -> bool:
+        return self._dist.read_text(str(path)) is not None
+
+    def iter_distutils_script_names(self) -> Iterator[str]:
+        # A distutils installation is always "flat" (not in e.g. egg form), so
+        # if this distribution's info location is NOT a pathlib.Path (but e.g.
+        # zipfile.Path), it can never contain any distutils scripts.
+        if not isinstance(self._info_location, pathlib.Path):
+            return
+        for child in self._info_location.joinpath("scripts").iterdir():
+            yield child.name
+
+    def read_text(self, path: InfoPath) -> str:
+        content = self._dist.read_text(str(path))
+        if content is None:
+            raise FileNotFoundError(path)
+        return content
+
+    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
+        # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
+        return self._dist.entry_points
+
+    @property
+    def metadata(self) -> email.message.Message:
+        return self._dist.metadata
+
+    def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
+        """Parse a ``requires.txt`` in an egg-info directory.
+
+        This is an INI-ish format where an egg-info stores dependencies. A
+        section name describes an extra (and optionally an environment
+        marker), while each entry is an arbitrary string (not a key-value
+        pair) representing a dependency as a requirement string (no markers).
+
+        There is a construct in ``importlib.metadata`` called ``Sectioned`` that
+        does mostly the same, but the format is currently considered private.
+        """
+        content = self._dist.read_text("requires.txt")
+        if content is None:
+            return
+        extra = marker = ""  # Section-less entries don't have markers.
+        for line in content.splitlines():
+            line = line.strip()
+            if not line or line.startswith("#"):  # Comment; ignored.
+                continue
+            if line.startswith("[") and line.endswith("]"):  # A section header.
+                extra, _, marker = line.strip("[]").partition(":")
+                continue
+            yield RequiresEntry(requirement=line, extra=extra, marker=marker)
+
+    def _iter_egg_info_extras(self) -> Iterable[str]:
+        """Get extras from the egg-info directory."""
+        known_extras = {""}
+        for entry in self._iter_requires_txt_entries():
+            if entry.extra in known_extras:
+                continue
+            known_extras.add(entry.extra)
+            yield entry.extra
+
+    def iter_provided_extras(self) -> Iterable[str]:
+        iterator = (
+            self._dist.metadata.get_all("Provides-Extra")
+            or self._iter_egg_info_extras()
+        )
+        return (safe_extra(extra) for extra in iterator)
+
+    def _iter_egg_info_dependencies(self) -> Iterable[str]:
+        """Get distribution dependencies from the egg-info directory.
+
+        To ease parsing, this converts a legacy dependency entry into a PEP 508
+        requirement string. Like ``_iter_requires_txt_entries()``, there is code
+        in ``importlib.metadata`` that does mostly the same, but does not do
+        exactly what we need.
+
+        Namely, ``importlib.metadata`` does not normalize the extra name before
+        putting it into the requirement string, which causes marker comparison
+        to fail because the dist-info format does normalize. This is consistent
+        in all currently available PEP 517 backends, although not standardized.
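A toy re-implementation of the requires.txt conversion described above (the sample lines are ours), showing how section headers become extra/marker pairs and each entry becomes a PEP 508 requirement string:

    lines = [
        "requests>=2.0",
        "[socks]",
        "PySocks!=1.5.7",
        "[ssl:python_version < '3']",
        "wincertstore",
    ]
    extra = marker = ""
    for line in lines:
        if line.startswith("[") and line.endswith("]"):  # a section header
            extra, _, marker = line.strip("[]").partition(":")
            continue
        if extra and marker:
            m = f'({marker}) and extra == "{extra}"'
        elif extra:
            m = f'extra == "{extra}"'
        else:
            m = marker
        print(f"{line} ; {m}" if m else line)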
+ """ + for entry in self._iter_requires_txt_entries(): + if entry.extra and entry.marker: + marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"' + elif entry.extra: + marker = f'extra == "{safe_extra(entry.extra)}"' + elif entry.marker: + marker = entry.marker + else: + marker = "" + if marker: + yield f"{entry.requirement} ; {marker}" + else: + yield entry.requirement + + def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: + req_string_iterator = ( + self._dist.metadata.get_all("Requires-Dist") + or self._iter_egg_info_dependencies() + ) + contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras] + for req_string in req_string_iterator: + req = Requirement(req_string) + if not req.marker: + yield req + elif not extras and req.marker.evaluate({"extra": ""}): + yield req + elif any(req.marker.evaluate(context) for context in contexts): + yield req diff --git a/pipenv/patched/notpip/_internal/metadata/importlib/_envs.py b/pipenv/patched/notpip/_internal/metadata/importlib/_envs.py new file mode 100644 index 0000000000..9dc3c1e234 --- /dev/null +++ b/pipenv/patched/notpip/_internal/metadata/importlib/_envs.py @@ -0,0 +1,163 @@ +import functools +import importlib.metadata +import os +import pathlib +import sys +import zipfile +import zipimport +from typing import Iterator, List, Optional, Sequence, Set, Tuple + +from pipenv.patched.notpip._vendor.packaging.utils import NormalizedName, canonicalize_name + +from pipenv.patched.notpip._internal.metadata.base import BaseDistribution, BaseEnvironment +from pipenv.patched.notpip._internal.utils.deprecation import deprecated + +from ._compat import BasePath, get_dist_name, get_info_location +from ._dists import Distribution + + +class _DistributionFinder: + """Finder to locate distributions. + + The main purpose of this class is to memoize found distributions' names, so + only one distribution is returned for each package name. At lot of pip code + assumes this (because it is setuptools's behavior), and not doing the same + can potentially cause a distribution in lower precedence path to override a + higher precedence one if the caller is not careful. + + Eventually we probably want to make it possible to see lower precedence + installations as well. It's useful feature, after all. + """ + + FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]] + + def __init__(self) -> None: + self._found_names: Set[NormalizedName] = set() + + def _find_impl(self, location: str) -> Iterator[FoundResult]: + """Find distributions in a location.""" + # To know exactly where we find a distribution, we have to feed in the + # paths one by one, instead of dumping the list to importlib.metadata. + for dist in importlib.metadata.distributions(path=[location]): + normalized_name = canonicalize_name(get_dist_name(dist)) + if normalized_name in self._found_names: + continue + self._found_names.add(normalized_name) + info_location = get_info_location(dist) + yield dist, info_location + + def find(self, location: str) -> Iterator[BaseDistribution]: + """Find distributions in a location. + + The path can be either a directory, or a ZIP archive. 
+ """ + for dist, info_location in self._find_impl(location): + if info_location is None: + installed_location: Optional[BasePath] = None + else: + installed_location = info_location.parent + yield Distribution(dist, info_location, installed_location) + + def find_linked(self, location: str) -> Iterator[BaseDistribution]: + """Read location in egg-link files and return distributions in there. + + The path should be a directory; otherwise this returns nothing. This + follows how setuptools does this for compatibility. The first non-empty + line in the egg-link is read as a path (resolved against the egg-link's + containing directory if relative). Distributions found at that linked + location are returned. + """ + path = pathlib.Path(location) + if not path.is_dir(): + return + for child in path.iterdir(): + if child.suffix != ".egg-link": + continue + with child.open() as f: + lines = (line.strip() for line in f) + target_rel = next((line for line in lines if line), "") + if not target_rel: + continue + target_location = str(path.joinpath(target_rel)) + for dist, info_location in self._find_impl(target_location): + yield Distribution(dist, info_location, path) + + def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]: + from pipenv.patched.notpip._vendor.pkg_resources import find_distributions + + from pipenv.patched.notpip._internal.metadata import pkg_resources as legacy + + with os.scandir(location) as it: + for entry in it: + if not entry.name.endswith(".egg"): + continue + for dist in find_distributions(entry.path): + yield legacy.Distribution(dist) + + def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]: + from pipenv.patched.notpip._vendor.pkg_resources import find_eggs_in_zip + + from pipenv.patched.notpip._internal.metadata import pkg_resources as legacy + + try: + importer = zipimport.zipimporter(location) + except zipimport.ZipImportError: + return + for dist in find_eggs_in_zip(importer, location): + yield legacy.Distribution(dist) + + def find_eggs(self, location: str) -> Iterator[BaseDistribution]: + """Find eggs in a location. + + This actually uses the old *pkg_resources* backend. We likely want to + deprecate this so we can eventually remove the *pkg_resources* + dependency entirely. Before that, this should first emit a deprecation + warning for some versions when using the fallback since importing + *pkg_resources* is slow for those who don't need it. + """ + if os.path.isdir(location): + yield from self._find_eggs_in_dir(location) + if zipfile.is_zipfile(location): + yield from self._find_eggs_in_zip(location) + + +@functools.lru_cache(maxsize=None) # Warn a distribution exactly once. +def _emit_egg_deprecation(location: Optional[str]) -> None: + deprecated( + reason=f"Loading egg at {location} is deprecated.", + replacement="to use pip for package installation.", + gone_in=None, + ) + + +class Environment(BaseEnvironment): + def __init__(self, paths: Sequence[str]) -> None: + self._paths = paths + + @classmethod + def default(cls) -> BaseEnvironment: + return cls(sys.path) + + @classmethod + def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment: + if paths is None: + return cls(sys.path) + return cls(paths) + + def _iter_distributions(self) -> Iterator[BaseDistribution]: + finder = _DistributionFinder() + for location in self._paths: + yield from finder.find(location) + for dist in finder.find_eggs(location): + # _emit_egg_deprecation(dist.location) # TODO: Enable this. 
+ yield dist + # This must go last because that's how pkg_resources tie-breaks. + yield from finder.find_linked(location) + + def get_distribution(self, name: str) -> Optional[BaseDistribution]: + matches = ( + distribution + for distribution in self.iter_all_distributions() + if distribution.canonical_name == canonicalize_name(name) + ) + return next(matches, None) diff --git a/pipenv/patched/notpip/_internal/metadata/pkg_resources.py b/pipenv/patched/notpip/_internal/metadata/pkg_resources.py index b56713d64f..dfe38fb32b 100644 --- a/pipenv/patched/notpip/_internal/metadata/pkg_resources.py +++ b/pipenv/patched/notpip/_internal/metadata/pkg_resources.py @@ -2,7 +2,6 @@ import email.parser import logging import os -import pathlib import zipfile from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional @@ -12,7 +11,8 @@ from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version from pipenv.patched.notpip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel -from pipenv.patched.notpip._internal.utils.misc import display_path +from pipenv.patched.notpip._internal.utils.egg_link import egg_link_path_from_location +from pipenv.patched.notpip._internal.utils.misc import display_path, normalize_path from pipenv.patched.notpip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file from .base import ( @@ -73,7 +73,7 @@ def __init__(self, dist: pkg_resources.Distribution) -> None: self._dist = dist @classmethod - def from_directory(cls, directory: str) -> "Distribution": + def from_directory(cls, directory: str) -> BaseDistribution: dist_dir = directory.rstrip(os.sep) # Build a PathMetadata object, from path to metadata. :wink: @@ -93,14 +93,7 @@ def from_directory(cls, directory: str) -> "Distribution": return cls(dist) @classmethod - def from_wheel(cls, wheel: Wheel, name: str) -> "Distribution": - """Load the distribution from a given wheel. - - :raises InvalidWheel: Whenever loading of the wheel causes a - :py:exc:`zipfile.BadZipFile` exception to be thrown. - :raises UnsupportedWheel: If the wheel is a valid zip, but malformed - internally. 
- """ + def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: try: with wheel.as_zipfile() as zf: info_dir, _ = parse_wheel(zf, name) @@ -124,6 +117,17 @@ def from_wheel(cls, wheel: Wheel, name: str) -> "Distribution": def location(self) -> Optional[str]: return self._dist.location + @property + def installed_location(self) -> Optional[str]: + egg_link = egg_link_path_from_location(self.raw_name) + if egg_link: + location = egg_link + elif self.location: + location = self.location + else: + return None + return normalize_path(location) + @property def info_location(self) -> Optional[str]: return self._dist.egg_info @@ -149,14 +153,8 @@ def version(self) -> DistributionVersion: def is_file(self, path: InfoPath) -> bool: return self._dist.has_metadata(str(path)) - def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]: - name = str(path) - if not self._dist.has_metadata(name): - raise FileNotFoundError(name) - if not self._dist.isdir(name): - raise NotADirectoryError(name) - for child in self._dist.metadata_listdir(name): - yield pathlib.PurePosixPath(path, child) + def iter_distutils_script_names(self) -> Iterator[str]: + yield from self._dist.metadata_listdir("scripts") def read_text(self, path: InfoPath) -> str: name = str(path) @@ -217,6 +215,10 @@ def default(cls) -> BaseEnvironment: def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment: return cls(pkg_resources.WorkingSet(paths)) + def _iter_distributions(self) -> Iterator[BaseDistribution]: + for dist in self._ws: + yield Distribution(dist) + def _search_distribution(self, name: str) -> Optional[BaseDistribution]: """Find a distribution matching the ``name`` in the environment. @@ -224,7 +226,7 @@ def _search_distribution(self, name: str) -> Optional[BaseDistribution]: match the behavior of ``pkg_resources.get_distribution()``. 
""" canonical_name = canonicalize_name(name) - for dist in self.iter_distributions(): + for dist in self.iter_all_distributions(): if dist.canonical_name == canonical_name: return dist return None @@ -250,7 +252,3 @@ def get_distribution(self, name: str) -> Optional[BaseDistribution]: except pkg_resources.DistributionNotFound: return None return self._search_distribution(name) - - def _iter_distributions(self) -> Iterator[BaseDistribution]: - for dist in self._ws: - yield Distribution(dist) diff --git a/pipenv/patched/notpip/_internal/models/direct_url.py b/pipenv/patched/notpip/_internal/models/direct_url.py index 92060d45db..e75feda9ca 100644 --- a/pipenv/patched/notpip/_internal/models/direct_url.py +++ b/pipenv/patched/notpip/_internal/models/direct_url.py @@ -74,14 +74,10 @@ def __init__( vcs: str, commit_id: str, requested_revision: Optional[str] = None, - resolved_revision: Optional[str] = None, - resolved_revision_type: Optional[str] = None, ) -> None: self.vcs = vcs self.requested_revision = requested_revision self.commit_id = commit_id - self.resolved_revision = resolved_revision - self.resolved_revision_type = resolved_revision_type @classmethod def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: @@ -91,8 +87,6 @@ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: vcs=_get_required(d, str, "vcs"), commit_id=_get_required(d, str, "commit_id"), requested_revision=_get(d, str, "requested_revision"), - resolved_revision=_get(d, str, "resolved_revision"), - resolved_revision_type=_get(d, str, "resolved_revision_type"), ) def _to_dict(self) -> Dict[str, Any]: @@ -100,8 +94,6 @@ def _to_dict(self) -> Dict[str, Any]: vcs=self.vcs, requested_revision=self.requested_revision, commit_id=self.commit_id, - resolved_revision=self.resolved_revision, - resolved_revision_type=self.resolved_revision_type, ) diff --git a/pipenv/patched/notpip/_internal/network/cache.py b/pipenv/patched/notpip/_internal/network/cache.py index 99b84b7d8d..7d0265d2c3 100644 --- a/pipenv/patched/notpip/_internal/network/cache.py +++ b/pipenv/patched/notpip/_internal/network/cache.py @@ -3,7 +3,7 @@ import os from contextlib import contextmanager -from typing import Iterator, Optional +from typing import Generator, Optional from pipenv.patched.notpip._vendor.cachecontrol.cache import BaseCache from pipenv.patched.notpip._vendor.cachecontrol.caches import FileCache @@ -18,7 +18,7 @@ def is_from_cache(response: Response) -> bool: @contextmanager -def suppressed_cache_errors() -> Iterator[None]: +def suppressed_cache_errors() -> Generator[None, None, None]: """If we can't access the cache then we can just skip caching and process requests as if caching wasn't enabled. 
""" diff --git a/pipenv/patched/notpip/_internal/network/lazy_wheel.py b/pipenv/patched/notpip/_internal/network/lazy_wheel.py index 42545850c5..fc4165c297 100644 --- a/pipenv/patched/notpip/_internal/network/lazy_wheel.py +++ b/pipenv/patched/notpip/_internal/network/lazy_wheel.py @@ -5,7 +5,7 @@ from bisect import bisect_left, bisect_right from contextlib import contextmanager from tempfile import NamedTemporaryFile -from typing import Any, Dict, Iterator, List, Optional, Tuple +from typing import Any, Dict, Generator, List, Optional, Tuple from zipfile import BadZipfile, ZipFile from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name @@ -139,7 +139,7 @@ def __exit__(self, *exc: Any) -> Optional[bool]: return self._file.__exit__(*exc) @contextmanager - def _stay(self) -> Iterator[None]: + def _stay(self) -> Generator[None, None, None]: """Return a context manager keeping the position. At the end of the block, seek back to original position. @@ -177,8 +177,8 @@ def _stream_response( def _merge( self, start: int, end: int, left: int, right: int - ) -> Iterator[Tuple[int, int]]: - """Return an iterator of intervals to be fetched. + ) -> Generator[Tuple[int, int], None, None]: + """Return a generator of intervals to be fetched. Args: start (int): Start of needed interval diff --git a/pipenv/patched/notpip/_internal/network/session.py b/pipenv/patched/notpip/_internal/network/session.py index 88f79b0b38..76e563acf1 100644 --- a/pipenv/patched/notpip/_internal/network/session.py +++ b/pipenv/patched/notpip/_internal/network/session.py @@ -15,7 +15,7 @@ import sys import urllib.parse import warnings -from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union +from typing import Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple, Union from pipenv.patched.notpip._vendor import requests, urllib3 from pipenv.patched.notpip._vendor.cachecontrol import CacheControlAdapter @@ -374,7 +374,7 @@ def add_trusted_host( # Mount wildcard ports for the same host. self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter) - def iter_secure_origins(self) -> Iterator[SecureOrigin]: + def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]: yield from SECURE_ORIGINS for host, port in self.pip_trusted_origins: yield ("*", host, "*" if port is None else port) @@ -449,6 +449,8 @@ def is_secure_origin(self, location: Link) -> bool: def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response: # Allow setting a default timeout on a session kwargs.setdefault("timeout", self.timeout) + # Allow setting a default proxies on a session + kwargs.setdefault("proxies", self.proxies) # Dispatch the actual request return super().request(method, url, *args, **kwargs) diff --git a/pipenv/patched/notpip/_internal/network/utils.py b/pipenv/patched/notpip/_internal/network/utils.py index 3825f9ed9e..672f6ad43f 100644 --- a/pipenv/patched/notpip/_internal/network/utils.py +++ b/pipenv/patched/notpip/_internal/network/utils.py @@ -1,4 +1,4 @@ -from typing import Dict, Iterator +from typing import Dict, Generator from pipenv.patched.notpip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response @@ -56,7 +56,7 @@ def raise_for_status(resp: Response) -> None: def response_chunks( response: Response, chunk_size: int = CONTENT_CHUNK_SIZE -) -> Iterator[bytes]: +) -> Generator[bytes, None, None]: """Given a requests Response, provide the data chunks.""" try: # Special case for urllib3. 
diff --git a/pipenv/patched/notpip/_internal/req/req_tracker.py b/pipenv/patched/notpip/_internal/operations/build/build_tracker.py similarity index 85% rename from pipenv/patched/notpip/_internal/req/req_tracker.py rename to pipenv/patched/notpip/_internal/operations/build/build_tracker.py index f0d9d92c9e..49b601b7dc 100644 --- a/pipenv/patched/notpip/_internal/req/req_tracker.py +++ b/pipenv/patched/notpip/_internal/operations/build/build_tracker.py @@ -3,7 +3,7 @@ import logging import os from types import TracebackType -from typing import Dict, Iterator, Optional, Set, Type, Union +from typing import Dict, Generator, Optional, Set, Type, Union from pipenv.patched.notpip._internal.models.link import Link from pipenv.patched.notpip._internal.req.req_install import InstallRequirement @@ -13,7 +13,7 @@ @contextlib.contextmanager -def update_env_context_manager(**changes: str) -> Iterator[None]: +def update_env_context_manager(**changes: str) -> Generator[None, None, None]: target = os.environ # Save values from the target and change them. @@ -39,25 +39,25 @@ def update_env_context_manager(**changes: str) -> Iterator[None]: @contextlib.contextmanager -def get_requirement_tracker() -> Iterator["RequirementTracker"]: - root = os.environ.get("PIP_REQ_TRACKER") +def get_build_tracker() -> Generator["BuildTracker", None, None]: + root = os.environ.get("PIP_BUILD_TRACKER") with contextlib.ExitStack() as ctx: if root is None: - root = ctx.enter_context(TempDirectory(kind="req-tracker")).path - ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + root = ctx.enter_context(TempDirectory(kind="build-tracker")).path + ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root)) logger.debug("Initialized build tracking at %s", root) - with RequirementTracker(root) as tracker: + with BuildTracker(root) as tracker: yield tracker -class RequirementTracker: +class BuildTracker: def __init__(self, root: str) -> None: self._root = root self._entries: Set[InstallRequirement] = set() logger.debug("Created build tracker: %s", self._root) - def __enter__(self) -> "RequirementTracker": + def __enter__(self) -> "BuildTracker": logger.debug("Entered build tracker: %s", self._root) return self @@ -118,7 +118,7 @@ def cleanup(self) -> None: logger.debug("Removed build tracker: %r", self._root) @contextlib.contextmanager - def track(self, req: InstallRequirement) -> Iterator[None]: + def track(self, req: InstallRequirement) -> Generator[None, None, None]: self.add(req) yield self.remove(req) diff --git a/pipenv/patched/notpip/_internal/operations/freeze.py b/pipenv/patched/notpip/_internal/operations/freeze.py index 8c4471beab..4b45b8ec67 100644 --- a/pipenv/patched/notpip/_internal/operations/freeze.py +++ b/pipenv/patched/notpip/_internal/operations/freeze.py @@ -1,7 +1,7 @@ import collections import logging import os -from typing import Container, Dict, Iterable, Iterator, List, NamedTuple, Optional, Set +from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name from pipenv.patched.notpip._vendor.packaging.version import Version @@ -31,7 +31,7 @@ def freeze( isolated: bool = False, exclude_editable: bool = False, skip: Container[str] = (), -) -> Iterator[str]: +) -> Generator[str, None, None]: installations: Dict[str, FrozenRequirement] = {} dists = get_environment(paths).iter_installed_distributions( diff --git a/pipenv/patched/notpip/_internal/operations/install/wheel.py 
b/pipenv/patched/notpip/_internal/operations/install/wheel.py index 41a3b32daf..e3fbfbcdd7 100644 --- a/pipenv/patched/notpip/_internal/operations/install/wheel.py +++ b/pipenv/patched/notpip/_internal/operations/install/wheel.py @@ -22,6 +22,7 @@ BinaryIO, Callable, Dict, + Generator, Iterable, Iterator, List, @@ -589,7 +590,7 @@ def is_entrypoint_wrapper(file: "File") -> bool: file.save() record_installed(file.src_record_path, file.dest_path, file.changed) - def pyc_source_file_paths() -> Iterator[str]: + def pyc_source_file_paths() -> Generator[str, None, None]: # We de-duplicate installation paths, since there can be overlap (e.g. # file in .data maps to same location as file in wheel root). # Sorting installation paths makes it easier to reproduce and debug @@ -656,7 +657,7 @@ def pyc_output_path(path: str) -> str: generated_file_mode = 0o666 & ~current_umask() @contextlib.contextmanager - def _generate_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: + def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]: with adjacent_tmp_file(path, **kwargs) as f: yield f os.chmod(f.name, generated_file_mode) @@ -706,7 +707,7 @@ def _generate_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: @contextlib.contextmanager -def req_error_context(req_description: str) -> Iterator[None]: +def req_error_context(req_description: str) -> Generator[None, None, None]: try: yield except InstallationError as e: diff --git a/pipenv/patched/notpip/_internal/operations/prepare.py b/pipenv/patched/notpip/_internal/operations/prepare.py index 58a6f3f2d6..a87396cde2 100644 --- a/pipenv/patched/notpip/_internal/operations/prepare.py +++ b/pipenv/patched/notpip/_internal/operations/prepare.py @@ -33,12 +33,11 @@ dist_from_wheel_url, ) from pipenv.patched.notpip._internal.network.session import PipSession +from pipenv.patched.notpip._internal.operations.build.build_tracker import BuildTracker from pipenv.patched.notpip._internal.req.req_install import InstallRequirement -from pipenv.patched.notpip._internal.req.req_tracker import RequirementTracker -from pipenv.patched.notpip._internal.utils.filesystem import copy2_fixed from pipenv.patched.notpip._internal.utils.hashes import Hashes, MissingHashes from pipenv.patched.notpip._internal.utils.logging import indent_log -from pipenv.patched.notpip._internal.utils.misc import display_path, hide_url, is_installable_dir, rmtree +from pipenv.patched.notpip._internal.utils.misc import display_path, hide_url, is_installable_dir from pipenv.patched.notpip._internal.utils.temp_dir import TempDirectory from pipenv.patched.notpip._internal.utils.unpacking import unpack_file from pipenv.patched.notpip._internal.vcs import vcs @@ -48,14 +47,17 @@ def _get_prepared_distribution( req: InstallRequirement, - req_tracker: RequirementTracker, + build_tracker: BuildTracker, finder: PackageFinder, build_isolation: bool, + check_build_deps: bool, ) -> BaseDistribution: """Prepare a distribution for installation.""" abstract_dist = make_distribution_for_install_requirement(req) - with req_tracker.track(req): - abstract_dist.prepare_distribution_metadata(finder, build_isolation) + with build_tracker.track(req): + abstract_dist.prepare_distribution_metadata( + finder, build_isolation, check_build_deps + ) return abstract_dist.get_metadata_distribution() @@ -98,55 +100,6 @@ def get_http_url( return File(from_path, content_type) -def _copy2_ignoring_special_files(src: str, dest: str) -> None: - """Copying special files is not supported, but as a convenience to 
users - we skip errors copying them. This supports tools that may create e.g. - socket files in the project source directory. - """ - try: - copy2_fixed(src, dest) - except shutil.SpecialFileError as e: - # SpecialFileError may be raised due to either the source or - # destination. If the destination was the cause then we would actually - # care, but since the destination directory is deleted prior to - # copy we ignore all of them assuming it is caused by the source. - logger.warning( - "Ignoring special file error '%s' encountered copying %s to %s.", - str(e), - src, - dest, - ) - - -def _copy_source_tree(source: str, target: str) -> None: - target_abspath = os.path.abspath(target) - target_basename = os.path.basename(target_abspath) - target_dirname = os.path.dirname(target_abspath) - - def ignore(d: str, names: List[str]) -> List[str]: - skipped: List[str] = [] - if d == source: - # Pulling in those directories can potentially be very slow, - # exclude the following directories if they appear in the top - # level dir (and only it). - # See discussion at https://github.com/pypa/pip/pull/6770 - skipped += [".tox", ".nox"] - if os.path.abspath(d) == target_dirname: - # Prevent an infinite recursion if the target is in source. - # This can happen when TMPDIR is set to ${PWD}/... - # and we copy PWD to TMPDIR. - skipped += [target_basename] - return skipped - - shutil.copytree( - source, - target, - ignore=ignore, - symlinks=True, - copy_function=_copy2_ignoring_special_files, - ) - - def get_file_url( link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None ) -> File: @@ -191,19 +144,7 @@ def unpack_url( unpack_vcs_link(link, location, verbosity=verbosity) return None - # Once out-of-tree-builds are no longer supported, could potentially - # replace the below condition with `assert not link.is_existing_dir` - # - unpack_url does not need to be called for in-tree-builds. - # - # As further cleanup, _copy_source_tree and accompanying tests can - # be removed. - # - # TODO when use-deprecated=out-of-tree-build is removed - if link.is_existing_dir(): - if os.path.isdir(location): - rmtree(location) - _copy_source_tree(link.file_path, location) - return None + assert not link.is_existing_dir() # file urls if link.is_file: @@ -261,7 +202,8 @@ def __init__( download_dir: Optional[str], src_dir: str, build_isolation: bool, - req_tracker: RequirementTracker, + check_build_deps: bool, + build_tracker: BuildTracker, session: PipSession, progress_bar: str, finder: PackageFinder, @@ -269,13 +211,12 @@ def __init__( use_user_site: bool, lazy_wheel: bool, verbosity: int, - in_tree_build: bool, ) -> None: super().__init__() self.src_dir = src_dir self.build_dir = build_dir - self.req_tracker = req_tracker + self.build_tracker = build_tracker self._session = session self._download = Downloader(session, progress_bar) self._batch_download = BatchDownloader(session, progress_bar) @@ -288,6 +229,9 @@ def __init__( # Is build isolation allowed? self.build_isolation = build_isolation + # Should check build dependencies? + self.check_build_deps = check_build_deps + # Should hash-checking be required? self.require_hashes = require_hashes @@ -300,9 +244,6 @@ def __init__( # How verbose should underlying tooling be? self.verbosity = verbosity - # Should in-tree builds be used for local paths? - self.in_tree_build = in_tree_build - # Memoized downloaded files, as mapping of url: path. self._downloaded: Dict[str, str] = {} @@ -336,7 +277,7 @@ def _ensure_link_req_src_dir( # directory. 
            return
         assert req.source_dir is None
-        if req.link.is_existing_dir() and self.in_tree_build:
+        if req.link.is_existing_dir():
             # build local directories in-tree
             req.source_dir = req.link.file_path
             return
@@ -525,7 +466,7 @@ def _prepare_linked_requirement(
             self._ensure_link_req_src_dir(req, parallel_builds)
         hashes = self._get_linked_req_hashes(req)
 
-        if link.is_existing_dir() and self.in_tree_build:
+        if link.is_existing_dir():
             local_file = None
         elif link.url not in self._downloaded:
             try:
@@ -555,9 +496,10 @@ def _prepare_linked_requirement(
 
         dist = _get_prepared_distribution(
             req,
-            self.req_tracker,
+            self.build_tracker,
             self.finder,
             self.build_isolation,
+            self.check_build_deps,
         )
         return dist
 
@@ -608,9 +550,10 @@ def prepare_editable_requirement(
 
         dist = _get_prepared_distribution(
             req,
-            self.req_tracker,
+            self.build_tracker,
             self.finder,
             self.build_isolation,
+            self.check_build_deps,
         )
 
         req.check_if_exists(self.use_user_site)
diff --git a/pipenv/patched/notpip/_internal/pyproject.py b/pipenv/patched/notpip/_internal/pyproject.py
index c9544e181e..4b34aac4de 100644
--- a/pipenv/patched/notpip/_internal/pyproject.py
+++ b/pipenv/patched/notpip/_internal/pyproject.py
@@ -1,3 +1,4 @@
+import importlib.util
 import os
 from collections import namedtuple
 from typing import Any, List, Optional
@@ -89,9 +90,15 @@ def load_pyproject_toml(
 
     # If we haven't worked out whether to use PEP 517 yet,
     # and the user hasn't explicitly stated a preference,
-    # we do so if the project has a pyproject.toml file.
+    # we do so if the project has a pyproject.toml file
+    # or if we cannot import setuptools.
+
+    # We fall back to PEP 517 when setuptools is not available,
+    # so that setuptools can be installed as a default build backend.
+    # For more info see:
+    # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
     elif use_pep517 is None:
-        use_pep517 = has_pyproject
+        use_pep517 = has_pyproject or not importlib.util.find_spec("setuptools")
 
     # At this point, we know whether we're going to use PEP 517.
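A sketch of the new default spelled out in the comment above: PEP 517 is selected when pyproject.toml exists or setuptools cannot be imported (the helper name is ours):

    import importlib.util
    import os

    def default_use_pep517(project_dir: str) -> bool:
        has_pyproject = os.path.isfile(os.path.join(project_dir, "pyproject.toml"))
        return bool(has_pyproject or not importlib.util.find_spec("setuptools"))

    print(default_use_pep517("."))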
assert use_pep517 is not None diff --git a/pipenv/patched/notpip/_internal/req/__init__.py b/pipenv/patched/notpip/_internal/req/__init__.py index 0f68f7e3c6..9ba8a167d9 100644 --- a/pipenv/patched/notpip/_internal/req/__init__.py +++ b/pipenv/patched/notpip/_internal/req/__init__.py @@ -1,6 +1,6 @@ import collections import logging -from typing import Iterator, List, Optional, Sequence, Tuple +from typing import Generator, List, Optional, Sequence, Tuple from pipenv.patched.notpip._internal.utils.logging import indent_log @@ -28,7 +28,7 @@ def __repr__(self) -> str: def _validate_requirements( requirements: List[InstallRequirement], -) -> Iterator[Tuple[str, InstallRequirement]]: +) -> Generator[Tuple[str, InstallRequirement], None, None]: for req in requirements: assert req.name, f"invalid to-be-installed requirement: {req}" yield req.name, req diff --git a/pipenv/patched/notpip/_internal/req/constructors.py b/pipenv/patched/notpip/_internal/req/constructors.py index cef8ed1313..5dce4ea330 100644 --- a/pipenv/patched/notpip/_internal/req/constructors.py +++ b/pipenv/patched/notpip/_internal/req/constructors.py @@ -207,6 +207,7 @@ def install_req_from_editable( constraint: bool = False, user_supplied: bool = False, permit_editable_wheels: bool = False, + config_settings: Optional[Dict[str, str]] = None, ) -> InstallRequirement: parts = parse_req_from_editable(editable_req) @@ -224,6 +225,7 @@ def install_req_from_editable( install_options=options.get("install_options", []) if options else [], global_options=options.get("global_options", []) if options else [], hash_options=options.get("hashes", {}) if options else {}, + config_settings=config_settings, extras=parts.extras, ) @@ -380,6 +382,7 @@ def install_req_from_line( constraint: bool = False, line_source: Optional[str] = None, user_supplied: bool = False, + config_settings: Optional[Dict[str, str]] = None, ) -> InstallRequirement: """Creates an InstallRequirement from a name, which might be a requirement, directory containing 'setup.py', filename, or URL. 
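The new config_settings parameter is threaded through every constructor and stored on the resulting InstallRequirement, so the PEP 517 hook calls made later can see it. A hedged usage sketch (the requirement name and settings dict are illustrative only):

from pipenv.patched.notpip._internal.req.constructors import install_req_from_line

# Hypothetical call: parse a requirement line and attach PEP 517 config
# settings; ConfiguredPep517HookCaller (added in utils/misc.py below)
# forwards this mapping to every backend hook.
ireq = install_req_from_line(
    "examplepkg==1.0",
    config_settings={"--build-option": "--some-flag"},
)
assert ireq.config_settings == {"--build-option": "--some-flag"}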
@@ -399,6 +402,7 @@ def install_req_from_line( install_options=options.get("install_options", []) if options else [], global_options=options.get("global_options", []) if options else [], hash_options=options.get("hashes", {}) if options else {}, + config_settings=config_settings, constraint=constraint, extras=parts.extras, user_supplied=user_supplied, @@ -411,6 +415,7 @@ def install_req_from_req_string( isolated: bool = False, use_pep517: Optional[bool] = None, user_supplied: bool = False, + config_settings: Optional[Dict[str, str]] = None, ) -> InstallRequirement: try: req = get_requirement(req_string) @@ -440,6 +445,7 @@ def install_req_from_req_string( isolated=isolated, use_pep517=use_pep517, user_supplied=user_supplied, + config_settings=config_settings, ) @@ -448,6 +454,7 @@ def install_req_from_parsed_requirement( isolated: bool = False, use_pep517: Optional[bool] = None, user_supplied: bool = False, + config_settings: Optional[Dict[str, str]] = None, ) -> InstallRequirement: if parsed_req.is_editable: req = install_req_from_editable( @@ -457,6 +464,7 @@ def install_req_from_parsed_requirement( constraint=parsed_req.constraint, isolated=isolated, user_supplied=user_supplied, + config_settings=config_settings, ) else: @@ -469,6 +477,7 @@ def install_req_from_parsed_requirement( constraint=parsed_req.constraint, line_source=parsed_req.line_source, user_supplied=user_supplied, + config_settings=config_settings, ) return req @@ -487,4 +496,6 @@ def install_req_from_link_and_ireq( install_options=ireq.install_options, global_options=ireq.global_options, hash_options=ireq.hash_options, + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, ) diff --git a/pipenv/patched/notpip/_internal/req/req_file.py b/pipenv/patched/notpip/_internal/req/req_file.py index 005ab59368..dd0fc901fe 100644 --- a/pipenv/patched/notpip/_internal/req/req_file.py +++ b/pipenv/patched/notpip/_internal/req/req_file.py @@ -13,8 +13,8 @@ Any, Callable, Dict, + Generator, Iterable, - Iterator, List, Optional, Tuple, @@ -129,7 +129,7 @@ def parse_requirements( finder: Optional["PackageFinder"] = None, options: Optional[optparse.Values] = None, constraint: bool = False, -) -> Iterator[ParsedRequirement]: +) -> Generator[ParsedRequirement, None, None]: """Parse a requirements file and yield ParsedRequirement instances. :param filename: Path or url of requirements file. 
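A note on the recurring Iterator-to-Generator annotation churn throughout this patch: for a plain generator function the two are interchangeable at runtime, but Generator[X, None, None] also pins down the send and return types. A small self-contained sketch:

from typing import Generator, Iterator

def count_up(n: int) -> Generator[int, None, None]:
    # Same runtime object either way; the Generator spelling just makes
    # the "nothing sent in, nothing returned" contract explicit.
    yield from range(n)

numbers: Iterator[int] = count_up(3)  # every Generator is also an Iterator
assert list(numbers) == [0, 1, 2]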
@@ -233,6 +233,8 @@ def handle_option_line( index_urls = [opts.index_url] if opts.no_index is True: index_urls = [] + if opts.extra_index_urls: + index_urls.extend(opts.extra_index_urls) if opts.find_links: # FIXME: it would be nice to keep track of the source # of the find_links: support a find-links local path @@ -319,13 +321,15 @@ def __init__( self._session = session self._line_parser = line_parser - def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: + def parse( + self, filename: str, constraint: bool + ) -> Generator[ParsedLine, None, None]: """Parse a given file, yielding parsed lines.""" yield from self._parse_and_recurse(filename, constraint) def _parse_and_recurse( self, filename: str, constraint: bool - ) -> Iterator[ParsedLine]: + ) -> Generator[ParsedLine, None, None]: for line in self._parse_file(filename, constraint): if not line.is_requirement and ( line.opts.requirements or line.opts.constraints @@ -354,7 +358,9 @@ def _parse_and_recurse( else: yield line - def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]: + def _parse_file( + self, filename: str, constraint: bool + ) -> Generator[ParsedLine, None, None]: _, content = get_file_content(filename, self._session) lines_enum = preprocess(content) diff --git a/pipenv/patched/notpip/_internal/req/req_install.py b/pipenv/patched/notpip/_internal/req/req_install.py index c5ef9a61d2..f55e82864d 100644 --- a/pipenv/patched/notpip/_internal/req/req_install.py +++ b/pipenv/patched/notpip/_internal/req/req_install.py @@ -46,6 +46,7 @@ ) from pipenv.patched.notpip._internal.utils.hashes import Hashes from pipenv.patched.notpip._internal.utils.misc import ( + ConfiguredPep517HookCaller, ask_path_exists, backup_dir, display_path, @@ -80,6 +81,7 @@ def __init__( install_options: Optional[List[str]] = None, global_options: Optional[List[str]] = None, hash_options: Optional[Dict[str, List[str]]] = None, + config_settings: Optional[Dict[str, str]] = None, constraint: bool = False, extras: Collection[str] = (), user_supplied: bool = False, @@ -138,6 +140,7 @@ def __init__( self.install_options = install_options if install_options else [] self.global_options = global_options if global_options else [] self.hash_options = hash_options if hash_options else {} + self.config_settings = config_settings # Set to True after successful preparation of this requirement self.prepared = False # User supplied requirement are explicitly requested for installation @@ -470,7 +473,8 @@ def load_pyproject_toml(self) -> None: requires, backend, check, backend_path = pyproject_toml_data self.requirements_to_check = check self.pyproject_requires = requires - self.pep517_backend = Pep517HookCaller( + self.pep517_backend = ConfiguredPep517HookCaller( + self, self.unpacked_source_directory, backend, backend_path=backend_path, diff --git a/pipenv/patched/notpip/_internal/req/req_set.py b/pipenv/patched/notpip/_internal/req/req_set.py index 697ee8483b..af0493fbcb 100644 --- a/pipenv/patched/notpip/_internal/req/req_set.py +++ b/pipenv/patched/notpip/_internal/req/req_set.py @@ -1,13 +1,10 @@ import logging from collections import OrderedDict -from typing import Dict, Iterable, List, Optional, Tuple +from typing import Dict, List from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name -from pipenv.patched.notpip._internal.exceptions import InstallationError -from pipenv.patched.notpip._internal.models.wheel import Wheel from pipenv.patched.notpip._internal.req.req_install import InstallRequirement 
-from pipenv.patched.notpip._internal.utils import compatibility_tags logger = logging.getLogger(__name__) @@ -51,123 +48,6 @@ def add_named_requirement(self, install_req: InstallRequirement) -> None: project_name = canonicalize_name(install_req.name) self.requirements[project_name] = install_req - def add_requirement( - self, - install_req: InstallRequirement, - parent_req_name: Optional[str] = None, - extras_requested: Optional[Iterable[str]] = None, - ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]: - """Add install_req as a requirement to install. - - :param parent_req_name: The name of the requirement that needed this - added. The name is used because when multiple unnamed requirements - resolve to the same name, we could otherwise end up with dependency - links that point outside the Requirements set. parent_req must - already be added. Note that None implies that this is a user - supplied requirement, vs an inferred one. - :param extras_requested: an iterable of extras used to evaluate the - environment markers. - :return: Additional requirements to scan. That is either [] if - the requirement is not applicable, or [install_req] if the - requirement is applicable and has just been added. - """ - # If the markers do not match, ignore this requirement. - if not install_req.match_markers(extras_requested): - logger.info( - "Ignoring %s: markers '%s' don't match your environment", - install_req.name, - install_req.markers, - ) - return [], None - - # If the wheel is not supported, raise an error. - # Should check this after filtering out based on environment markers to - # allow specifying different wheels based on the environment/OS, in a - # single requirements file. - if install_req.link and install_req.link.is_wheel: - wheel = Wheel(install_req.link.filename) - tags = compatibility_tags.get_supported() - if self.check_supported_wheels and not wheel.supported(tags): - raise InstallationError( - "{} is not a supported wheel on this platform.".format( - wheel.filename - ) - ) - - # This next bit is really a sanity check. - assert ( - not install_req.user_supplied or parent_req_name is None - ), "a user supplied req shouldn't have a parent" - - # Unnamed requirements are scanned again and the requirement won't be - # added as a dependency until after scanning. - if not install_req.name: - self.add_unnamed_requirement(install_req) - return [install_req], None - - try: - existing_req: Optional[InstallRequirement] = self.get_requirement( - install_req.name - ) - except KeyError: - existing_req = None - - has_conflicting_requirement = ( - parent_req_name is None - and existing_req - and not existing_req.constraint - and existing_req.extras == install_req.extras - and existing_req.req - and install_req.req - and existing_req.req.specifier != install_req.req.specifier - ) - if has_conflicting_requirement: - raise InstallationError( - "Double requirement given: {} (already in {}, name={!r})".format( - install_req, existing_req, install_req.name - ) - ) - - # When no existing requirement exists, add the requirement as a - # dependency and it will be scanned again after. - if not existing_req: - self.add_named_requirement(install_req) - # We'd want to rescan this requirement later - return [install_req], install_req - - # Assume there's no need to scan, and that we've already - # encountered this for scanning. 
- if install_req.constraint or not existing_req.constraint: - return [], existing_req - - does_not_satisfy_constraint = install_req.link and not ( - existing_req.link and install_req.link.path == existing_req.link.path - ) - if does_not_satisfy_constraint: - raise InstallationError( - "Could not satisfy constraints for '{}': " - "installation from path or url cannot be " - "constrained to a version".format(install_req.name) - ) - # If we're now installing a constraint, mark the existing - # object for real installation. - existing_req.constraint = False - # If we're now installing a user supplied requirement, - # mark the existing object as such. - if install_req.user_supplied: - existing_req.user_supplied = True - existing_req.extras = tuple( - sorted(set(existing_req.extras) | set(install_req.extras)) - ) - logger.debug( - "Setting %s extras to: %s", - existing_req, - existing_req.extras, - ) - # Return the existing requirement for addition to the parent and - # scanning again. - return [existing_req], existing_req - def has_requirement(self, name: str) -> bool: project_name = canonicalize_name(name) diff --git a/pipenv/patched/notpip/_internal/req/req_uninstall.py b/pipenv/patched/notpip/_internal/req/req_uninstall.py index 4dd6561b98..0e2c9e7b3c 100644 --- a/pipenv/patched/notpip/_internal/req/req_uninstall.py +++ b/pipenv/patched/notpip/_internal/req/req_uninstall.py @@ -3,7 +3,7 @@ import sys import sysconfig from importlib.util import cache_from_source -from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple +from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple from pipenv.patched.notpip._internal.exceptions import UninstallationError from pipenv.patched.notpip._internal.locations import get_bin_prefix, get_bin_user @@ -17,7 +17,9 @@ logger = getLogger(__name__) -def _script_names(bin_dir: str, script_name: str, is_gui: bool) -> Iterator[str]: +def _script_names( + bin_dir: str, script_name: str, is_gui: bool +) -> Generator[str, None, None]: """Create the fully qualified name of the files created by {console,gui}_scripts for the given ``dist``. Returns the list of file names @@ -34,9 +36,11 @@ def _script_names(bin_dir: str, script_name: str, is_gui: bool) -> Iterator[str] yield f"{exe_name}-script.py" -def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]: +def _unique( + fn: Callable[..., Generator[Any, None, None]] +) -> Callable[..., Generator[Any, None, None]]: @functools.wraps(fn) - def unique(*args: Any, **kw: Any) -> Iterator[Any]: + def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]: seen: Set[Any] = set() for item in fn(*args, **kw): if item not in seen: @@ -47,7 +51,7 @@ def unique(*args: Any, **kw: Any) -> Iterator[Any]: @_unique -def uninstallation_paths(dist: BaseDistribution) -> Iterator[str]: +def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]: """ Yield all the uninstallation paths for dist based on RECORD-without-.py[co] @@ -527,7 +531,10 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": # above, so this only covers the setuptools-style editable. 
with open(develop_egg_link) as fh: link_pointer = os.path.normcase(fh.readline().strip()) - assert link_pointer == dist_location, ( + normalized_link_pointer = normalize_path(link_pointer) + assert os.path.samefile( + normalized_link_pointer, normalized_dist_location + ), ( f"Egg-link {link_pointer} does not match installed location of " f"{dist.raw_name} (at {dist_location})" ) @@ -551,10 +558,10 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": # find distutils scripts= scripts try: - for script in dist.iterdir("scripts"): - paths_to_remove.add(os.path.join(bin_dir, script.name)) + for script in dist.iter_distutils_script_names(): + paths_to_remove.add(os.path.join(bin_dir, script)) if WINDOWS: - paths_to_remove.add(os.path.join(bin_dir, f"{script.name}.bat")) + paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat")) except (FileNotFoundError, NotADirectoryError): pass @@ -562,7 +569,7 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": def iter_scripts_to_remove( dist: BaseDistribution, bin_dir: str, - ) -> Iterator[str]: + ) -> Generator[str, None, None]: for entry_point in dist.iter_entry_points(): if entry_point.group == "console_scripts": yield from _script_names(bin_dir, entry_point.name, False) diff --git a/pipenv/patched/notpip/_internal/resolution/legacy/resolver.py b/pipenv/patched/notpip/_internal/resolution/legacy/resolver.py index a434e16d47..9c4a23a8c8 100644 --- a/pipenv/patched/notpip/_internal/resolution/legacy/resolver.py +++ b/pipenv/patched/notpip/_internal/resolution/legacy/resolver.py @@ -28,12 +28,14 @@ DistributionNotFound, HashError, HashErrors, + InstallationError, NoneMetadataError, UnsupportedPythonVersion, ) from pipenv.patched.notpip._internal.index.package_finder import PackageFinder from pipenv.patched.notpip._internal.metadata import BaseDistribution from pipenv.patched.notpip._internal.models.link import Link +from pipenv.patched.notpip._internal.models.wheel import Wheel from pipenv.patched.notpip._internal.operations.prepare import RequirementPreparer from pipenv.patched.notpip._internal.req.req_install import ( InstallRequirement, @@ -41,6 +43,7 @@ ) from pipenv.patched.notpip._internal.req.req_set import RequirementSet from pipenv.patched.notpip._internal.resolution.base import BaseResolver, InstallRequirementProvider +from pipenv.patched.notpip._internal.utils import compatibility_tags from pipenv.patched.notpip._internal.utils.compatibility_tags import get_supported from pipenv.patched.notpip._internal.utils.logging import indent_log from pipenv.patched.notpip._internal.utils.misc import normalize_version_info @@ -168,7 +171,7 @@ def resolve( for req in root_reqs: if req.constraint: check_invalid_constraint_type(req) - requirement_set.add_requirement(req) + self._add_requirement_to_set(requirement_set, req) # Actually prepare the files, and collect any exceptions. Most hash # exceptions cannot be checked ahead of time, because @@ -188,6 +191,124 @@ def resolve( return requirement_set + def _add_requirement_to_set( + self, + requirement_set: RequirementSet, + install_req: InstallRequirement, + parent_req_name: Optional[str] = None, + extras_requested: Optional[Iterable[str]] = None, + ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]: + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. 
The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + # If the markers do not match, ignore this requirement. + if not install_req.match_markers(extras_requested): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, + install_req.markers, + ) + return [], None + + # If the wheel is not supported, raise an error. + # Should check this after filtering out based on environment markers to + # allow specifying different wheels based on the environment/OS, in a + # single requirements file. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + tags = compatibility_tags.get_supported() + if requirement_set.check_supported_wheels and not wheel.supported(tags): + raise InstallationError( + "{} is not a supported wheel on this platform.".format( + wheel.filename + ) + ) + + # This next bit is really a sanity check. + assert ( + not install_req.user_supplied or parent_req_name is None + ), "a user supplied req shouldn't have a parent" + + # Unnamed requirements are scanned again and the requirement won't be + # added as a dependency until after scanning. + if not install_req.name: + requirement_set.add_unnamed_requirement(install_req) + return [install_req], None + + try: + existing_req: Optional[ + InstallRequirement + ] = requirement_set.get_requirement(install_req.name) + except KeyError: + existing_req = None + + has_conflicting_requirement = ( + parent_req_name is None + and existing_req + and not existing_req.constraint + and existing_req.extras == install_req.extras + and existing_req.req + and install_req.req + and existing_req.req.specifier != install_req.req.specifier + ) + if has_conflicting_requirement: + raise InstallationError( + "Double requirement given: {} (already in {}, name={!r})".format( + install_req, existing_req, install_req.name + ) + ) + + # When no existing requirement exists, add the requirement as a + # dependency and it will be scanned again after. + if not existing_req: + requirement_set.add_named_requirement(install_req) + # We'd want to rescan this requirement later + return [install_req], install_req + + # Assume there's no need to scan, and that we've already + # encountered this for scanning. + if install_req.constraint or not existing_req.constraint: + return [], existing_req + + does_not_satisfy_constraint = install_req.link and not ( + existing_req.link and install_req.link.path == existing_req.link.path + ) + if does_not_satisfy_constraint: + raise InstallationError( + "Could not satisfy constraints for '{}': " + "installation from path or url cannot be " + "constrained to a version".format(install_req.name) + ) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + # If we're now installing a user supplied requirement, + # mark the existing object as such. 
+ if install_req.user_supplied: + existing_req.user_supplied = True + existing_req.extras = tuple( + sorted(set(existing_req.extras) | set(install_req.extras)) + ) + logger.debug( + "Setting %s extras to: %s", + existing_req, + existing_req.extras, + ) + # Return the existing requirement for addition to the parent and + # scanning again. + return [existing_req], existing_req + def _is_upgrade_allowed(self, req: InstallRequirement) -> bool: if self.upgrade_strategy == "to-satisfy-only": return False @@ -393,7 +514,8 @@ def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None: # the legacy resolver so I'm just not going to bother refactoring. sub_install_req = self._make_install_req(str(subreq), req_to_install) parent_req_name = req_to_install.name - to_scan_again, add_to_parent = requirement_set.add_requirement( + to_scan_again, add_to_parent = self._add_requirement_to_set( + requirement_set, sub_install_req, parent_req_name=parent_req_name, extras_requested=extras_requested, @@ -410,7 +532,9 @@ def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None: # 'unnamed' requirements can only come from being directly # provided by the user. assert req_to_install.user_supplied - requirement_set.add_requirement(req_to_install, parent_req_name=None) + self._add_requirement_to_set( + requirement_set, req_to_install, parent_req_name=None + ) if not self.ignore_dependencies: if req_to_install.extras: diff --git a/pipenv/patched/notpip/_internal/resolution/resolvelib/candidates.py b/pipenv/patched/notpip/_internal/resolution/resolvelib/candidates.py index 8b1a55a306..bc86e434f5 100644 --- a/pipenv/patched/notpip/_internal/resolution/resolvelib/candidates.py +++ b/pipenv/patched/notpip/_internal/resolution/resolvelib/candidates.py @@ -70,6 +70,7 @@ def make_install_req_from_link( global_options=template.global_options, hashes=template.hash_options, ), + config_settings=template.config_settings, ) ireq.original_link = template.original_link ireq.link = link @@ -93,6 +94,7 @@ def make_install_req_from_editable( global_options=template.global_options, hashes=template.hash_options, ), + config_settings=template.config_settings, ) @@ -117,6 +119,7 @@ def _make_install_req_from_dist( global_options=template.global_options, hashes=template.hash_options, ), + config_settings=template.config_settings, ) ireq.satisfied_by = dist return ireq diff --git a/pipenv/patched/notpip/_internal/resolution/resolvelib/factory.py b/pipenv/patched/notpip/_internal/resolution/resolvelib/factory.py index c2b17cf9a8..fcf4c1f220 100644 --- a/pipenv/patched/notpip/_internal/resolution/resolvelib/factory.py +++ b/pipenv/patched/notpip/_internal/resolution/resolvelib/factory.py @@ -617,8 +617,15 @@ def _report_single_requirement_conflict( req_disp = f"{req} (from {parent.name})" cands = self._finder.find_all_candidates(req.project_name) + skipped_by_requires_python = self._finder.requires_python_skipped_reasons() versions = [str(v) for v in sorted({c.version for c in cands})] + if skipped_by_requires_python: + logger.critical( + "Ignored the following versions that require a different python " + "version: %s", + "; ".join(skipped_by_requires_python) or "none", + ) logger.critical( "Could not find a version that satisfies the requirement %s " "(from versions: %s)", diff --git a/pipenv/patched/notpip/_internal/self_outdated_check.py b/pipenv/patched/notpip/_internal/self_outdated_check.py index 3376186cf0..09fcb68b8e 100644 --- a/pipenv/patched/notpip/_internal/self_outdated_check.py +++ 
b/pipenv/patched/notpip/_internal/self_outdated_check.py @@ -1,23 +1,34 @@ import datetime +import functools import hashlib import json import logging import optparse import os.path import sys -from typing import Any, Dict +from dataclasses import dataclass +from typing import Any, Callable, Dict, Optional from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version +from pipenv.patched.notpip._vendor.rich.console import Group +from pipenv.patched.notpip._vendor.rich.markup import escape +from pipenv.patched.notpip._vendor.rich.text import Text from pipenv.patched.notpip._internal.index.collector import LinkCollector from pipenv.patched.notpip._internal.index.package_finder import PackageFinder from pipenv.patched.notpip._internal.metadata import get_default_environment +from pipenv.patched.notpip._internal.metadata.base import DistributionVersion from pipenv.patched.notpip._internal.models.selection_prefs import SelectionPreferences from pipenv.patched.notpip._internal.network.session import PipSession +from pipenv.patched.notpip._internal.utils.compat import WINDOWS +from pipenv.patched.notpip._internal.utils.entrypoints import ( + get_best_invocation_for_this_pip, + get_best_invocation_for_this_python, +) from pipenv.patched.notpip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace from pipenv.patched.notpip._internal.utils.misc import ensure_dir -SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" +_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" logger = logging.getLogger(__name__) @@ -31,17 +42,17 @@ def _get_statefile_name(key: str) -> str: class SelfCheckState: def __init__(self, cache_dir: str) -> None: - self.state: Dict[str, Any] = {} - self.statefile_path = None + self._state: Dict[str, Any] = {} + self._statefile_path = None # Try to load the existing state if cache_dir: - self.statefile_path = os.path.join( + self._statefile_path = os.path.join( cache_dir, "selfcheck", _get_statefile_name(self.key) ) try: - with open(self.statefile_path, encoding="utf-8") as statefile: - self.state = json.load(statefile) + with open(self._statefile_path, encoding="utf-8") as statefile: + self._state = json.load(statefile) except (OSError, ValueError, KeyError): # Explicitly suppressing exceptions, since we don't want to # error out if the cache file is invalid. @@ -51,41 +62,87 @@ def __init__(self, cache_dir: str) -> None: def key(self) -> str: return sys.prefix - def save(self, pypi_version: str, current_time: datetime.datetime) -> None: + def get(self, current_time: datetime.datetime) -> Optional[str]: + """Check if we have a not-outdated version loaded already.""" + if not self._state: + return None + + if "last_check" not in self._state: + return None + + if "pypi_version" not in self._state: + return None + + seven_days_in_seconds = 7 * 24 * 60 * 60 + + # Determine if we need to refresh the state + last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT) + seconds_since_last_check = (current_time - last_check).total_seconds() + if seconds_since_last_check > seven_days_in_seconds: + return None + + return self._state["pypi_version"] + + def set(self, pypi_version: str, current_time: datetime.datetime) -> None: # If we do not have a path to cache in, don't bother saving. 
-        if not self.statefile_path:
+        if not self._statefile_path:
             return
 
         # Check to make sure that we own the directory
-        if not check_path_owner(os.path.dirname(self.statefile_path)):
+        if not check_path_owner(os.path.dirname(self._statefile_path)):
             return
 
         # Now that we've ensured the directory is owned by this user, we'll go
         # ahead and make sure that all our directories are created.
-        ensure_dir(os.path.dirname(self.statefile_path))
+        ensure_dir(os.path.dirname(self._statefile_path))
 
         state = {
             # Include the key so it's easy to tell which pip wrote the
             # file.
             "key": self.key,
-            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
+            "last_check": current_time.strftime(_DATE_FMT),
             "pypi_version": pypi_version,
         }
 
         text = json.dumps(state, sort_keys=True, separators=(",", ":"))
 
-        with adjacent_tmp_file(self.statefile_path) as f:
+        with adjacent_tmp_file(self._statefile_path) as f:
             f.write(text.encode())
 
         try:
             # Since we have a prefix-specific state file, we can just
             # overwrite whatever is there, no need to check.
-            replace(f.name, self.statefile_path)
+            replace(f.name, self._statefile_path)
         except OSError:
             # Best effort.
             pass
 
 
+@dataclass
+class UpgradePrompt:
+    old: str
+    new: str
+
+    def __rich__(self) -> Group:
+        if WINDOWS:
+            pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
+        else:
+            pip_cmd = get_best_invocation_for_this_pip()
+
+        notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
+        return Group(
+            Text(),
+            Text.from_markup(
+                f"{notice} A new release of pip is available: "
+                f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
+            ),
+            Text.from_markup(
+                f"{notice} To update, run: "
+                f"[green]{escape(pip_cmd)} install --upgrade pip"
+            ),
+        )
+
+
 def was_installed_by_pip(pkg: str) -> bool:
     """Checks whether pkg was installed by pip
 
@@ -96,6 +153,66 @@ def was_installed_by_pip(pkg: str) -> bool:
     return dist is not None and "pip" == dist.installer
 
 
+def _get_current_remote_pip_version(
+    session: PipSession, options: optparse.Values
+) -> Optional[str]:
+    # Lets use PackageFinder to see what the latest pip version is
+    link_collector = LinkCollector.create(
+        session,
+        options=options,
+        suppress_no_index=True,
+    )
+
+    # Pass allow_yanked=False so we don't suggest upgrading to a
+    # yanked version.
+    selection_prefs = SelectionPreferences(
+        allow_yanked=False,
+        allow_all_prereleases=False,  # Explicitly set to False
+    )
+
+    finder = PackageFinder.create(
+        link_collector=link_collector,
+        selection_prefs=selection_prefs,
+        use_deprecated_html5lib=("html5lib" in options.deprecated_features_enabled),
+    )
+    best_candidate = finder.find_best_candidate("pip").best_candidate
+    if best_candidate is None:
+        return None
+
+    return str(best_candidate.version)
+
+
+def _self_version_check_logic(
+    *,
+    state: SelfCheckState,
+    current_time: datetime.datetime,
+    local_version: DistributionVersion,
+    get_remote_version: Callable[[], Optional[str]],
+) -> Optional[UpgradePrompt]:
+    remote_version_str = state.get(current_time)
+    if remote_version_str is None:
+        remote_version_str = get_remote_version()
+        if remote_version_str is None:
+            logger.debug("No remote pip version found")
+            return None
+        state.set(remote_version_str, current_time)
+
+    remote_version = parse_version(remote_version_str)
+    logger.debug("Remote version of pip: %s", remote_version)
+    logger.debug("Local version of pip: %s", local_version)
+
+    pip_installed_by_pip = was_installed_by_pip("pip")
+    logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
+    if not pip_installed_by_pip:
+        return None  # Only suggest upgrade if pip is installed by pip.
+ + local_version_is_older = ( + local_version < remote_version + and local_version.base_version != remote_version.base_version + ) + if local_version_is_older: + return UpgradePrompt(old=str(local_version), new=remote_version_str) + + return None + + def pip_self_version_check(session: PipSession, options: optparse.Values) -> None: """Check for an update for pip. @@ -107,83 +224,17 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non if not installed_dist: return - pip_version = installed_dist.version - pypi_version = None - try: - state = SelfCheckState(cache_dir=options.cache_dir) - - current_time = datetime.datetime.utcnow() - # Determine if we need to refresh the state - if "last_check" in state.state and "pypi_version" in state.state: - last_check = datetime.datetime.strptime( - state.state["last_check"], SELFCHECK_DATE_FMT - ) - if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60: - pypi_version = state.state["pypi_version"] - - # Refresh the version if we need to or just see if we need to warn - if pypi_version is None: - # Lets use PackageFinder to see what the latest pip version is - link_collector = LinkCollector.create( - session, - options=options, - suppress_no_index=True, - ) - - # Pass allow_yanked=False so we don't suggest upgrading to a - # yanked version. - selection_prefs = SelectionPreferences( - allow_yanked=False, - allow_all_prereleases=False, # Explicitly set to False - ) - - finder = PackageFinder.create( - link_collector=link_collector, - selection_prefs=selection_prefs, - use_deprecated_html5lib=( - "html5lib" in options.deprecated_features_enabled - ), - ) - best_candidate = finder.find_best_candidate("pip").best_candidate - if best_candidate is None: - return - pypi_version = str(best_candidate.version) - - # save that we've performed a check - state.save(pypi_version, current_time) - - remote_version = parse_version(pypi_version) - - local_version_is_older = ( - pip_version < remote_version - and pip_version.base_version != remote_version.base_version - and was_installed_by_pip("pip") - ) - - # Determine if our pypi_version is older - if not local_version_is_older: - return - - # We cannot tell how the current pip is available in the current - # command context, so be pragmatic here and suggest the command - # that's always available. This does not accommodate spaces in - # `sys.executable` on purpose as it is not possible to do it - # correctly without knowing the user's shell. Thus, - # it won't be done until possible through the standard library. - # Do not be tempted to use the undocumented subprocess.list2cmdline. - # It is considered an internal implementation detail for a reason. 
- pip_cmd = f"{sys.executable} -m pip" - logger.warning( - "You are using pip version %s; however, version %s is " - "available.\nYou should consider upgrading via the " - "'%s install --upgrade pip' command.", - pip_version, - pypi_version, - pip_cmd, + upgrade_prompt = _self_version_check_logic( + state=SelfCheckState(cache_dir=options.cache_dir), + current_time=datetime.datetime.utcnow(), + local_version=installed_dist.version, + get_remote_version=functools.partial( + _get_current_remote_pip_version, session, options + ), ) + if upgrade_prompt is not None: + logger.info("[present-rich] %s", upgrade_prompt) except Exception: - logger.debug( - "There was an error checking the latest version of pip", - exc_info=True, - ) + logger.warning("There was an error checking the latest version of pip.") + logger.debug("See below for error", exc_info=True) diff --git a/pipenv/patched/notpip/_internal/utils/encoding.py b/pipenv/patched/notpip/_internal/utils/encoding.py index 1c73f6c9a5..008f06a79b 100644 --- a/pipenv/patched/notpip/_internal/utils/encoding.py +++ b/pipenv/patched/notpip/_internal/utils/encoding.py @@ -14,7 +14,7 @@ (codecs.BOM_UTF32_LE, "utf-32-le"), ] -ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)") +ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)") def auto_decode(data: bytes) -> str: diff --git a/pipenv/patched/notpip/_internal/utils/entrypoints.py b/pipenv/patched/notpip/_internal/utils/entrypoints.py index 7adda2fddf..db3565ea58 100644 --- a/pipenv/patched/notpip/_internal/utils/entrypoints.py +++ b/pipenv/patched/notpip/_internal/utils/entrypoints.py @@ -1,7 +1,23 @@ +import itertools +import os +import shutil import sys from typing import List, Optional from pipenv.patched.notpip._internal.cli.main import main +from pipenv.patched.notpip._internal.utils.compat import WINDOWS + +_EXECUTABLE_NAMES = [ + "pip", + f"pip{sys.version_info.major}", + f"pip{sys.version_info.major}.{sys.version_info.minor}", +] +if WINDOWS: + _allowed_extensions = {"", ".exe"} + _EXECUTABLE_NAMES = [ + "".join(parts) + for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions) + ] def _wrapper(args: Optional[List[str]] = None) -> int: @@ -25,3 +41,39 @@ def _wrapper(args: Optional[List[str]] = None) -> int: "running pip directly.\n" ) return main(args) + + +def get_best_invocation_for_this_pip() -> str: + """Try to figure out the best way to invoke pip in the current environment.""" + binary_directory = "Scripts" if WINDOWS else "bin" + binary_prefix = os.path.join(sys.prefix, binary_directory) + + # Try to use pip[X[.Y]] names, if those executables for this environment are + # the first on PATH with that name. + path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep) + exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts + if exe_are_in_PATH: + for exe_name in _EXECUTABLE_NAMES: + found_executable = shutil.which(exe_name) + if found_executable and os.path.samefile( + found_executable, + os.path.join(binary_prefix, exe_name), + ): + return exe_name + + # Use the `-m` invocation, if there's no "nice" invocation. + return f"{get_best_invocation_for_this_python()} -m pip" + + +def get_best_invocation_for_this_python() -> str: + """Try to figure out the best way to invoke the current Python.""" + exe = sys.executable + exe_name = os.path.basename(exe) + + # Try to use the basename, if it's the first executable. 
+ found_executable = shutil.which(exe_name) + if found_executable and os.path.samefile(found_executable, exe): + return exe_name + + # Use the full executable name, because we couldn't find something simpler. + return exe diff --git a/pipenv/patched/notpip/_internal/utils/filesystem.py b/pipenv/patched/notpip/_internal/utils/filesystem.py index c5c2929d8a..6e6af5f530 100644 --- a/pipenv/patched/notpip/_internal/utils/filesystem.py +++ b/pipenv/patched/notpip/_internal/utils/filesystem.py @@ -2,12 +2,10 @@ import os import os.path import random -import shutil -import stat import sys from contextlib import contextmanager from tempfile import NamedTemporaryFile -from typing import Any, BinaryIO, Iterator, List, Union, cast +from typing import Any, BinaryIO, Generator, List, Union, cast from pipenv.patched.notpip._vendor.tenacity import retry, stop_after_delay, wait_fixed @@ -42,35 +40,8 @@ def check_path_owner(path: str) -> bool: return False # assume we don't own the path -def copy2_fixed(src: str, dest: str) -> None: - """Wrap shutil.copy2() but map errors copying socket files to - SpecialFileError as expected. - - See also https://bugs.python.org/issue37700. - """ - try: - shutil.copy2(src, dest) - except OSError: - for f in [src, dest]: - try: - is_socket_file = is_socket(f) - except OSError: - # An error has already occurred. Another error here is not - # a problem and we can ignore it. - pass - else: - if is_socket_file: - raise shutil.SpecialFileError(f"`{f}` is a socket") - - raise - - -def is_socket(path: str) -> bool: - return stat.S_ISSOCK(os.lstat(path).st_mode) - - @contextmanager -def adjacent_tmp_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]: +def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]: """Return a file-like object pointing to a tmp file next to path. The file is created securely and is ensured to be written to disk diff --git a/pipenv/patched/notpip/_internal/utils/hashes.py b/pipenv/patched/notpip/_internal/utils/hashes.py index 88692c4412..85ee7feab5 100644 --- a/pipenv/patched/notpip/_internal/utils/hashes.py +++ b/pipenv/patched/notpip/_internal/utils/hashes.py @@ -1,5 +1,5 @@ import hashlib -from typing import TYPE_CHECKING, BinaryIO, Dict, Iterator, List +from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List from pipenv.patched.notpip._internal.exceptions import HashMismatch, HashMissing, InstallationError from pipenv.patched.notpip._internal.utils.misc import read_chunks @@ -67,7 +67,7 @@ def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool: """Return whether the given hex digest is allowed.""" return hex_digest in self._allowed.get(hash_name, []) - def check_against_chunks(self, chunks: Iterator[bytes]) -> None: + def check_against_chunks(self, chunks: Iterable[bytes]) -> None: """Check good hashes against ones built from iterable of chunks of data. 
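Broadening chunks from Iterator to Iterable matters for callers: a list is iterable but not an iterator, so under the old annotation the call below would have been rejected by a type checker. A hedged sketch of the vendored Hashes API (digest computed inline so the check passes):

import hashlib

from pipenv.patched.notpip._internal.utils.hashes import Hashes

data = b"example payload"
digest = hashlib.sha256(data).hexdigest()

# Raises HashMismatch/HashMissing on failure; returns None on success.
Hashes({"sha256": [digest]}).check_against_chunks([data])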
diff --git a/pipenv/patched/notpip/_internal/utils/logging.py b/pipenv/patched/notpip/_internal/utils/logging.py index cc9a6abe78..0b56f22863 100644 --- a/pipenv/patched/notpip/_internal/utils/logging.py +++ b/pipenv/patched/notpip/_internal/utils/logging.py @@ -6,21 +6,23 @@ import sys import threading from dataclasses import dataclass +from io import TextIOWrapper from logging import Filter -from typing import IO, Any, ClassVar, Iterator, List, Optional, TextIO, Type +from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type from pipenv.patched.notpip._vendor.rich.console import ( Console, ConsoleOptions, ConsoleRenderable, + RenderableType, RenderResult, + RichCast, ) from pipenv.patched.notpip._vendor.rich.highlighter import NullHighlighter from pipenv.patched.notpip._vendor.rich.logging import RichHandler from pipenv.patched.notpip._vendor.rich.segment import Segment from pipenv.patched.notpip._vendor.rich.style import Style -from pipenv.patched.notpip._internal.exceptions import DiagnosticPipError from pipenv.patched.notpip._internal.utils._log import VERBOSE, getLogger from pipenv.patched.notpip._internal.utils.compat import WINDOWS from pipenv.patched.notpip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX @@ -50,7 +52,7 @@ def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> @contextlib.contextmanager -def indent_log(num: int = 2) -> Iterator[None]: +def indent_log(num: int = 2) -> Generator[None, None, None]: """ A context manager which will cause the log output to be indented for any log messages emitted inside it. @@ -121,7 +123,7 @@ def format(self, record: logging.LogRecord) -> str: @dataclass class IndentedRenderable: - renderable: ConsoleRenderable + renderable: RenderableType indent: int def __rich_console__( @@ -152,12 +154,15 @@ def emit(self, record: logging.LogRecord) -> None: style: Optional[Style] = None # If we are given a diagnostic error to present, present it with indentation. 
- if record.msg == "[present-diagnostic] %s" and len(record.args) == 1: - diagnostic_error: DiagnosticPipError = record.args[0] # type: ignore[index] - assert isinstance(diagnostic_error, DiagnosticPipError) - - renderable: ConsoleRenderable = IndentedRenderable( - diagnostic_error, indent=get_indentation() + assert isinstance(record.args, tuple) + if record.msg == "[present-rich] %s" and len(record.args) == 1: + rich_renderable = record.args[0] + assert isinstance( + rich_renderable, (ConsoleRenderable, RichCast, str) + ), f"{rich_renderable} is not rich-console-renderable" + + renderable: RenderableType = IndentedRenderable( + rich_renderable, indent=get_indentation() ) else: message = self.format(record) @@ -193,7 +198,7 @@ def handleError(self, record: logging.LogRecord) -> None: class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): - def _open(self) -> IO[Any]: + def _open(self) -> TextIOWrapper: ensure_dir(os.path.dirname(self.baseFilename)) return super()._open() diff --git a/pipenv/patched/notpip/_internal/utils/misc.py b/pipenv/patched/notpip/_internal/utils/misc.py index 0c79796f1a..2a458878e0 100644 --- a/pipenv/patched/notpip/_internal/utils/misc.py +++ b/pipenv/patched/notpip/_internal/utils/misc.py @@ -21,6 +21,8 @@ BinaryIO, Callable, ContextManager, + Dict, + Generator, Iterable, Iterator, List, @@ -32,6 +34,7 @@ cast, ) +from pipenv.patched.notpip._vendor.pep517 import Pep517HookCaller from pipenv.patched.notpip._vendor.tenacity import retry, stop_after_delay, wait_fixed from pipenv.patched.notpip import __version__ @@ -54,6 +57,7 @@ "captured_stdout", "ensure_dir", "remove_auth_from_url", + "ConfiguredPep517HookCaller", ] @@ -264,7 +268,9 @@ def is_installable_dir(path: str) -> bool: return False -def read_chunks(file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE) -> Iterator[bytes]: +def read_chunks( + file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE +) -> Generator[bytes, None, None]: """Yield pieces of data from a file-like object until EOF.""" while True: chunk = file.read(size) @@ -346,7 +352,7 @@ def encoding(self): # type: ignore @contextlib.contextmanager -def captured_output(stream_name: str) -> Iterator[StreamWrapper]: +def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]: """Return a context manager used by captured_stdout/stdin/stderr that temporarily replaces the sys stream *stream_name* with a StringIO. @@ -556,9 +562,9 @@ def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None: python -m pip ... 
""" pip_names = [ - "pip.exe", - "pip{}.exe".format(sys.version_info[0]), - "pip{}.{}.exe".format(*sys.version_info[:2]), + "pip", + f"pip{sys.version_info.major}", + f"pip{sys.version_info.major}.{sys.version_info.minor}", ] # See https://github.com/pypa/pip/issues/1299 for more discussion @@ -627,3 +633,91 @@ def partition( """ t1, t2 = tee(iterable) return filterfalse(pred, t1), filter(pred, t2) + + +class ConfiguredPep517HookCaller(Pep517HookCaller): + def __init__( + self, + config_holder: Any, + source_dir: str, + build_backend: str, + backend_path: Optional[str] = None, + runner: Optional[Callable[..., None]] = None, + python_executable: Optional[str] = None, + ): + super().__init__( + source_dir, build_backend, backend_path, runner, python_executable + ) + self.config_holder = config_holder + + def build_wheel( + self, + wheel_directory: str, + config_settings: Optional[Dict[str, str]] = None, + metadata_directory: Optional[str] = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_wheel( + wheel_directory, config_settings=cs, metadata_directory=metadata_directory + ) + + def build_sdist( + self, sdist_directory: str, config_settings: Optional[Dict[str, str]] = None + ) -> str: + cs = self.config_holder.config_settings + return super().build_sdist(sdist_directory, config_settings=cs) + + def build_editable( + self, + wheel_directory: str, + config_settings: Optional[Dict[str, str]] = None, + metadata_directory: Optional[str] = None, + ) -> str: + cs = self.config_holder.config_settings + return super().build_editable( + wheel_directory, config_settings=cs, metadata_directory=metadata_directory + ) + + def get_requires_for_build_wheel( + self, config_settings: Optional[Dict[str, str]] = None + ) -> List[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_wheel(config_settings=cs) + + def get_requires_for_build_sdist( + self, config_settings: Optional[Dict[str, str]] = None + ) -> List[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_sdist(config_settings=cs) + + def get_requires_for_build_editable( + self, config_settings: Optional[Dict[str, str]] = None + ) -> List[str]: + cs = self.config_holder.config_settings + return super().get_requires_for_build_editable(config_settings=cs) + + def prepare_metadata_for_build_wheel( + self, + metadata_directory: str, + config_settings: Optional[Dict[str, str]] = None, + _allow_fallback: bool = True, + ) -> str: + cs = self.config_holder.config_settings + return super().prepare_metadata_for_build_wheel( + metadata_directory=metadata_directory, + config_settings=cs, + _allow_fallback=_allow_fallback, + ) + + def prepare_metadata_for_build_editable( + self, + metadata_directory: str, + config_settings: Optional[Dict[str, str]] = None, + _allow_fallback: bool = True, + ) -> str: + cs = self.config_holder.config_settings + return super().prepare_metadata_for_build_editable( + metadata_directory=metadata_directory, + config_settings=cs, + _allow_fallback=_allow_fallback, + ) diff --git a/pipenv/patched/notpip/_internal/utils/subprocess.py b/pipenv/patched/notpip/_internal/utils/subprocess.py index 16cff7979e..940ee31f2f 100644 --- a/pipenv/patched/notpip/_internal/utils/subprocess.py +++ b/pipenv/patched/notpip/_internal/utils/subprocess.py @@ -116,7 +116,7 @@ def call_subprocess( # replaced by INFO. if show_stdout: # Then log the subprocess output at INFO level. 
- log_subprocess = subprocess_logger.info + log_subprocess: Callable[..., None] = subprocess_logger.info used_level = logging.INFO else: # Then log the subprocess output using VERBOSE. This also ensures @@ -209,7 +209,7 @@ def call_subprocess( output_lines=all_output if not showing_subprocess else None, ) if log_failed_cmd: - subprocess_logger.error("[present-diagnostic] %s", error) + subprocess_logger.error("[present-rich] %s", error) subprocess_logger.verbose( "[bold magenta]full command[/]: [blue]%s[/]", escape(format_command_args(cmd)), diff --git a/pipenv/patched/notpip/_internal/utils/temp_dir.py b/pipenv/patched/notpip/_internal/utils/temp_dir.py index cf26edec91..2be5822a45 100644 --- a/pipenv/patched/notpip/_internal/utils/temp_dir.py +++ b/pipenv/patched/notpip/_internal/utils/temp_dir.py @@ -4,7 +4,7 @@ import os.path import tempfile from contextlib import ExitStack, contextmanager -from typing import Any, Dict, Iterator, Optional, TypeVar, Union +from typing import Any, Dict, Generator, Optional, TypeVar, Union from pipenv.patched.notpip._internal.utils.misc import enum, rmtree @@ -26,7 +26,7 @@ @contextmanager -def global_tempdir_manager() -> Iterator[None]: +def global_tempdir_manager() -> Generator[None, None, None]: global _tempdir_manager with ExitStack() as stack: old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack @@ -59,7 +59,7 @@ def get_delete(self, kind: str) -> bool: @contextmanager -def tempdir_registry() -> Iterator[TempDirectoryTypeRegistry]: +def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]: """Provides a scoped global tempdir registry that can be used to dictate whether directories should be deleted. """ @@ -200,7 +200,7 @@ def __init__(self, original: str, delete: Optional[bool] = None) -> None: super().__init__(delete=delete) @classmethod - def _generate_names(cls, name: str) -> Iterator[str]: + def _generate_names(cls, name: str) -> Generator[str, None, None]: """Generates a series of temporary names. 
    The algorithm replaces the leading characters in the name
diff --git a/pipenv/patched/notpip/_internal/utils/unpacking.py b/pipenv/patched/notpip/_internal/utils/unpacking.py
index 0a9b5f4500..df5295a3d0 100644
--- a/pipenv/patched/notpip/_internal/utils/unpacking.py
+++ b/pipenv/patched/notpip/_internal/utils/unpacking.py
@@ -188,8 +188,7 @@ def untar_file(filename: str, location: str) -> None:
             ensure_dir(path)
         elif member.issym():
             try:
-                # https://github.com/python/typeshed/issues/2673
-                tar._extract_member(member, path)  # type: ignore
+                tar._extract_member(member, path)
             except Exception as exc:
                 # Some corrupt tar files seem to produce this
                 # (specifically bad symlinks)
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py b/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py
index 8435d628d2..f631ae6df4 100644
--- a/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py
+++ b/pipenv/patched/notpip/_vendor/cachecontrol/__init__.py
@@ -8,7 +8,7 @@
 """
 __author__ = "Eric Larson"
 __email__ = "eric@ionrock.org"
-__version__ = "0.12.10"
+__version__ = "0.12.11"
 
 from .wrapper import CacheControl
 from .adapter import CacheControlAdapter
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/cache.py b/pipenv/patched/notpip/_vendor/cachecontrol/cache.py
index 44e4309d20..2a965f595f 100644
--- a/pipenv/patched/notpip/_vendor/cachecontrol/cache.py
+++ b/pipenv/patched/notpip/_vendor/cachecontrol/cache.py
@@ -41,3 +41,25 @@ def delete(self, key):
         with self.lock:
             if key in self.data:
                 self.data.pop(key)
+
+
+class SeparateBodyBaseCache(BaseCache):
+    """
+    In this variant, the body is not stored mixed in with the metadata, but is
+    passed in (as a bytes-like object) in a separate call to ``set_body()``.
+
+    That is, the expected interaction pattern is::
+
+        cache.set(key, serialized_metadata)
+        cache.set_body(key, body)
+
+    Similarly, the body should be loaded separately via ``get_body()``.
+    """
+    def set_body(self, key, body):
+        raise NotImplementedError()
+
+    def get_body(self, key):
+        """
+        Return the body as file-like object.
+ """ + raise NotImplementedError() diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/caches/__init__.py b/pipenv/patched/notpip/_vendor/cachecontrol/caches/__init__.py index 44becd6843..37827291fb 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/caches/__init__.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/caches/__init__.py @@ -2,5 +2,8 @@ # # SPDX-License-Identifier: Apache-2.0 -from .file_cache import FileCache # noqa -from .redis_cache import RedisCache # noqa +from .file_cache import FileCache, SeparateBodyFileCache +from .redis_cache import RedisCache + + +__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"] diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py b/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py index 6cd1106f88..f1ddb2ebdf 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/caches/file_cache.py @@ -6,7 +6,7 @@ import os from textwrap import dedent -from ..cache import BaseCache +from ..cache import BaseCache, SeparateBodyBaseCache from ..controller import CacheController try: @@ -57,7 +57,8 @@ def _secure_open_write(filename, fmode): raise -class FileCache(BaseCache): +class _FileCacheMixin: + """Shared implementation for both FileCache variants.""" def __init__( self, @@ -120,20 +121,25 @@ def get(self, key): def set(self, key, value, expires=None): name = self._fn(key) + self._write(name, value) + def _write(self, path, data: bytes): + """ + Safely write the data to the given path. + """ # Make sure the directory exists try: - os.makedirs(os.path.dirname(name), self.dirmode) + os.makedirs(os.path.dirname(path), self.dirmode) except (IOError, OSError): pass - with self.lock_class(name) as lock: + with self.lock_class(path) as lock: # Write our actual file with _secure_open_write(lock.path, self.filemode) as fh: - fh.write(value) + fh.write(data) - def delete(self, key): - name = self._fn(key) + def _delete(self, key, suffix): + name = self._fn(key) + suffix if not self.forever: try: os.remove(name) @@ -141,6 +147,38 @@ def delete(self, key): pass +class FileCache(_FileCacheMixin, BaseCache): + """ + Traditional FileCache: body is stored in memory, so not suitable for large + downloads. + """ + + def delete(self, key): + self._delete(key, "") + + +class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache): + """ + Memory-efficient FileCache: body is stored in a separate file, reducing + peak memory usage. + """ + + def get_body(self, key): + name = self._fn(key) + ".body" + try: + return open(name, "rb") + except FileNotFoundError: + return None + + def set_body(self, key, body): + name = self._fn(key) + ".body" + self._write(name, body) + + def delete(self, key): + self._delete(key, "") + self._delete(key, ".body") + + def url_to_file_path(url, filecache): """Return the file cache path based on the URL. 
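To make the metadata/body split concrete, a hedged sketch of plugging the new SeparateBodyFileCache into a CacheControl session (upstream cachecontrol import paths shown; in this tree they live under pipenv.patched.notpip._vendor):

import requests

from cachecontrol import CacheControl
from cachecontrol.caches import SeparateBodyFileCache

# Bodies are written to separate "<key>.body" files next to the metadata,
# so large responses are served from disk instead of being held in memory.
session = CacheControl(
    requests.Session(),
    cache=SeparateBodyFileCache(".web_cache"),
)
response = session.get("https://example.com/")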
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py b/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py
index da97071b8e..cb2b542e4e 100644
--- a/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py
+++ b/pipenv/patched/notpip/_vendor/cachecontrol/caches/redis_cache.py
@@ -19,9 +19,11 @@ def get(self, key):
     def set(self, key, value, expires=None):
         if not expires:
             self.conn.set(key, value)
-        else:
+        elif isinstance(expires, datetime):
             expires = expires - datetime.utcnow()
             self.conn.setex(key, int(expires.total_seconds()), value)
+        else:
+            self.conn.setex(key, expires, value)
 
     def delete(self, key):
         self.conn.delete(key)
diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/controller.py b/pipenv/patched/notpip/_vendor/cachecontrol/controller.py
index 99b754a5b7..8450495ffd 100644
--- a/pipenv/patched/notpip/_vendor/cachecontrol/controller.py
+++ b/pipenv/patched/notpip/_vendor/cachecontrol/controller.py
@@ -13,7 +13,7 @@
 
 from pipenv.patched.notpip._vendor.requests.structures import CaseInsensitiveDict
 
-from .cache import DictCache
+from .cache import DictCache, SeparateBodyBaseCache
 from .serialize import Serializer
 
 
@@ -27,15 +27,14 @@ def parse_uri(uri):
     """Parses a URI using the regex given in Appendix B of RFC 3986.
 
-        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
     """
     groups = URI.match(uri).groups()
     return (groups[1], groups[3], groups[4], groups[6], groups[8])
 
 
 class CacheController(object):
-    """An interface to see if request should cached or not.
-    """
+    """An interface to see if request should be cached or not."""
 
     def __init__(
         self, cache=None, cache_etags=True, serializer=None, status_codes=None
@@ -147,8 +146,13 @@ def cached_request(self, request):
             logger.debug("No cache entry available")
             return False
 
+        if isinstance(self.cache, SeparateBodyBaseCache):
+            body_file = self.cache.get_body(cache_url)
+        else:
+            body_file = None
+
         # Check whether it can be deserialized
-        resp = self.serializer.loads(request, cache_data)
+        resp = self.serializer.loads(request, cache_data, body_file)
         if not resp:
             logger.warning("Cache entry deserialization failed, entry ignored")
             return False
@@ -251,6 +255,26 @@ def conditional_headers(self, request):
 
         return new_headers
 
+    def _cache_set(self, cache_url, request, response, body=None, expires_time=None):
+        """
+        Store the data in the cache.
+        """
+        if isinstance(self.cache, SeparateBodyBaseCache):
+            # We pass in the body separately; just put a placeholder empty
+            # string in the metadata.
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, b""),
+                expires=expires_time,
+            )
+            self.cache.set_body(cache_url, body)
+        else:
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, body),
+                expires=expires_time,
+            )
+
     def cache_response(self, request, response, body=None, status_codes=None):
         """
         Algorithm for caching requests.
@@ -326,17 +350,13 @@ def cache_response(self, request, response, body=None, status_codes=None):
             logger.debug("etag object cached for {0} seconds".format(expires_time))
             logger.debug("Caching due to etag")
-            self.cache.set(
-                cache_url,
-                self.serializer.dumps(request, response, body),
-                expires=expires_time,
-            )
+            self._cache_set(cache_url, request, response, body, expires_time)
 
         # Add to the cache any permanent redirects. We do this before looking
         # at the Date headers.
elif int(response.status) in PERMANENT_REDIRECT_STATUSES: logger.debug("Caching permanent redirect") - self.cache.set(cache_url, self.serializer.dumps(request, response, b"")) + self._cache_set(cache_url, request, response, b"") # Add to the cache if the response headers demand it. If there # is no date header then we can't do anything about expiring @@ -347,10 +367,12 @@ def cache_response(self, request, response, body=None, status_codes=None): if "max-age" in cc and cc["max-age"] > 0: logger.debug("Caching b/c date exists and max-age > 0") expires_time = cc["max-age"] - self.cache.set( + self._cache_set( cache_url, - self.serializer.dumps(request, response, body), - expires=expires_time, + request, + response, + body, + expires_time, ) # If the request can expire, it means we should cache it @@ -368,10 +390,12 @@ def cache_response(self, request, response, body=None, status_codes=None): expires_time ) ) - self.cache.set( + self._cache_set( cache_url, - self.serializer.dumps(request, response, body=body), - expires=expires_time, + request, + response, + body, + expires_time, ) def update_cached_response(self, request, response): @@ -410,6 +434,6 @@ def update_cached_response(self, request, response): cached_response.status = 200 # update our cache - self.cache.set(cache_url, self.serializer.dumps(request, cached_response)) + self._cache_set(cache_url, request, cached_response) return cached_response diff --git a/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py b/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py index 8a67c15a12..a272f274de 100644 --- a/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py +++ b/pipenv/patched/notpip/_vendor/cachecontrol/serialize.py @@ -44,7 +44,7 @@ def dumps(self, request, response, body=None): # enough to have msgpack know the difference. data = { u"response": { - u"body": body, + u"body": body, # Empty bytestring if body is stored separately u"headers": dict( (text_type(k), text_type(v)) for k, v in response.headers.items() ), @@ -69,7 +69,7 @@ def dumps(self, request, response, body=None): return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) - def loads(self, request, data): + def loads(self, request, data, body_file=None): # Short circuit if we've been given an empty set of data if not data: return @@ -92,14 +92,14 @@ def loads(self, request, data): # Dispatch to the actual load method for the given version try: - return getattr(self, "_loads_v{}".format(ver))(request, data) + return getattr(self, "_loads_v{}".format(ver))(request, data, body_file) except AttributeError: # This is a version we don't have a loads function for, so we'll # just treat it as a miss and return None return - def prepare_response(self, request, cached): + def prepare_response(self, request, cached, body_file=None): """Verify our vary headers match and construct a real urllib3 HTTPResponse object. """ @@ -125,7 +125,10 @@ def prepare_response(self, request, cached): cached["response"]["headers"] = headers try: - body = io.BytesIO(body_raw) + if body_file is None: + body = io.BytesIO(body_raw) + else: + body = body_file except TypeError: # This can happen if cachecontrol serialized to v1 format (pickle) # using Python 2. A Python 2 str(byte string) will be unpickled as @@ -137,21 +140,22 @@ def prepare_response(self, request, cached): return HTTPResponse(body=body, preload_content=False, **cached["response"]) - def _loads_v0(self, request, data): + def _loads_v0(self, request, data, body_file=None): # The original legacy cache data. 
This doesn't contain enough # information to construct everything we need, so we'll treat this as # a miss. return - def _loads_v1(self, request, data): + def _loads_v1(self, request, data, body_file=None): try: cached = pickle.loads(data) except ValueError: return - return self.prepare_response(request, cached) + return self.prepare_response(request, cached, body_file) - def _loads_v2(self, request, data): + def _loads_v2(self, request, data, body_file=None): + assert body_file is None try: cached = json.loads(zlib.decompress(data).decode("utf8")) except (ValueError, zlib.error): @@ -169,18 +173,18 @@ def _loads_v2(self, request, data): for k, v in cached["vary"].items() ) - return self.prepare_response(request, cached) + return self.prepare_response(request, cached, body_file) - def _loads_v3(self, request, data): + def _loads_v3(self, request, data, body_file): # Due to Python 2 encoding issues, it's impossible to know for sure # exactly how to load v3 entries, thus we'll treat these as a miss so # that they get rewritten out as v4 entries. return - def _loads_v4(self, request, data): + def _loads_v4(self, request, data, body_file=None): try: cached = msgpack.loads(data, raw=False) except ValueError: return - return self.prepare_response(request, cached) + return self.prepare_response(request, cached, body_file) diff --git a/pipenv/patched/notpip/_vendor/certifi/LICENSE b/pipenv/patched/notpip/_vendor/certifi/LICENSE new file mode 100644 index 0000000000..c2fda9a264 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/certifi/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/pipenv/patched/notpip/_vendor/chardet/LICENSE b/pipenv/patched/notpip/_vendor/chardet/LICENSE new file mode 100644 index 0000000000..8add30ad59 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/chardet/LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. 
By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. 
The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. 
The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. 
To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. 
+ + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. 
+ + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. 
If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + + diff --git a/pipenv/patched/notpip/_vendor/colorama/LICENSE.txt b/pipenv/patched/notpip/_vendor/colorama/LICENSE.txt new file mode 100644 index 0000000000..3105888ec1 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/colorama/LICENSE.txt @@ -0,0 +1,27 @@ +Copyright (c) 2010 Jonathan Hartley +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holders, nor those of its contributors + may be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/patched/notpip/_vendor/distlib/LICENSE.txt b/pipenv/patched/notpip/_vendor/distlib/LICENSE.txt new file mode 100644 index 0000000000..c31ac56d77 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib/LICENSE.txt @@ -0,0 +1,284 @@ +A. 
HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.2 2.1.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2.1 2.2 2002 PSF yes + 2.2.2 2.2.1 2002 PSF yes + 2.2.3 2.2.2 2003 PSF yes + 2.3 2.2.2 2002-2003 PSF yes + 2.3.1 2.3 2002-2003 PSF yes + 2.3.2 2.3.1 2002-2003 PSF yes + 2.3.3 2.3.2 2002-2003 PSF yes + 2.3.4 2.3.3 2004 PSF yes + 2.3.5 2.3.4 2005 PSF yes + 2.4 2.3 2004 PSF yes + 2.4.1 2.4 2005 PSF yes + 2.4.2 2.4.1 2005 PSF yes + 2.4.3 2.4.2 2006 PSF yes + 2.4.4 2.4.3 2006 PSF yes + 2.5 2.4 2006 PSF yes + 2.5.1 2.5 2007 PSF yes + 2.5.2 2.5.1 2008 PSF yes + 2.5.3 2.5.2 2008 PSF yes + 2.6 2.5 2008 PSF yes + 2.6.1 2.6 2008 PSF yes + 2.6.2 2.6.1 2009 PSF yes + 2.6.3 2.6.2 2009 PSF yes + 2.6.4 2.6.3 2009 PSF yes + 2.6.5 2.6.4 2010 PSF yes + 3.0 2.6 2008 PSF yes + 3.0.1 3.0 2009 PSF yes + 3.1 3.0.1 2009 PSF yes + 3.1.1 3.1 2009 PSF yes + 3.1.2 3.1 2010 PSF yes + 3.2 3.1 2010 PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 +Python Software Foundation; All Rights Reserved" are retained in Python alone or +in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/distlib/_backport/__init__.py b/pipenv/patched/notpip/_vendor/distlib/_backport/__init__.py new file mode 100644 index 0000000000..f7dbf4c9aa --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib/_backport/__init__.py @@ -0,0 +1,6 @@ +"""Modules copied from Python 3 standard libraries, for internal use only. + +Individual classes and functions are found in d2._backport.misc. 
Intended +usage is to always import things missing from 3.1 from that module: the +built-in/stdlib objects will be used if found. +""" diff --git a/pipenv/patched/notpip/_vendor/distlib/_backport/misc.py b/pipenv/patched/notpip/_vendor/distlib/_backport/misc.py new file mode 100644 index 0000000000..cfb318d34f --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib/_backport/misc.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Backports for individual classes and functions.""" + +import os +import sys + +__all__ = ['cache_from_source', 'callable', 'fsencode'] + + +try: + from imp import cache_from_source +except ImportError: + def cache_from_source(py_file, debug=__debug__): + ext = debug and 'c' or 'o' + return py_file + ext + + +try: + callable = callable +except NameError: + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode +except AttributeError: + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, str): + return filename.encode(sys.getfilesystemencoding()) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) diff --git a/pipenv/patched/notpip/_vendor/distlib/_backport/shutil.py b/pipenv/patched/notpip/_vendor/distlib/_backport/shutil.py new file mode 100644 index 0000000000..10ed362539 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib/_backport/shutil.py @@ -0,0 +1,764 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Utility functions for copying and archiving files and directory trees. + +XXX The functions here don't copy the resource fork or other metadata on Mac. + +""" + +import os +import sys +import stat +from os.path import abspath +import fnmatch +try: + from collections.abc import Callable +except ImportError: + from collections import Callable +import errno +from . import tarfile + +try: + import bz2 + _BZ2_SUPPORTED = True +except ImportError: + _BZ2_SUPPORTED = False + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", + "copytree", "move", "rmtree", "Error", "SpecialFileError", + "ExecError", "make_archive", "get_archive_formats", + "register_archive_format", "unregister_archive_format", + "get_unpack_formats", "register_unpack_format", + "unregister_unpack_format", "unpack_archive", "ignore_patterns"] + +class Error(EnvironmentError): + pass + +class SpecialFileError(EnvironmentError): + """Raised when trying to do a kind of operation (e.g. copying) which is + not supported on a special file (e.g. a named pipe)""" + +class ExecError(EnvironmentError): + """Raised when a command could not be executed""" + +class ReadError(EnvironmentError): + """Raised when an archive cannot be read""" + +class RegistryError(Exception): + """Raised when a registry operation with the archiving + and unpacking registries fails""" + + +try: + WindowsError +except NameError: + WindowsError = None + +def copyfileobj(fsrc, fdst, length=16*1024): + """copy data from file-like object fsrc to file-like object fdst""" + while 1: + buf = fsrc.read(length) + if not buf: + break + fdst.write(buf) + +def _samefile(src, dst): + # Macintosh, Unix. 
+ if hasattr(os.path, 'samefile'): + try: + return os.path.samefile(src, dst) + except OSError: + return False + + # All other platforms: check for same pathname. + return (os.path.normcase(os.path.abspath(src)) == + os.path.normcase(os.path.abspath(dst))) + +def copyfile(src, dst): + """Copy data from src to dst""" + if _samefile(src, dst): + raise Error("`%s` and `%s` are the same file" % (src, dst)) + + for fn in [src, dst]: + try: + st = os.stat(fn) + except OSError: + # File most likely does not exist + pass + else: + # XXX What about other special files? (sockets, devices...) + if stat.S_ISFIFO(st.st_mode): + raise SpecialFileError("`%s` is a named pipe" % fn) + + with open(src, 'rb') as fsrc: + with open(dst, 'wb') as fdst: + copyfileobj(fsrc, fdst) + +def copymode(src, dst): + """Copy mode bits from src to dst""" + if hasattr(os, 'chmod'): + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + os.chmod(dst, mode) + +def copystat(src, dst): + """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + if hasattr(os, 'utime'): + os.utime(dst, (st.st_atime, st.st_mtime)) + if hasattr(os, 'chmod'): + os.chmod(dst, mode) + if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): + try: + os.chflags(dst, st.st_flags) + except OSError as why: + if (not hasattr(errno, 'EOPNOTSUPP') or + why.errno != errno.EOPNOTSUPP): + raise + +def copy(src, dst): + """Copy data and mode bits ("cp src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copymode(src, dst) + +def copy2(src, dst): + """Copy data and all stat info ("cp -p src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copystat(src, dst) + +def ignore_patterns(*patterns): + """Function that can be used as copytree() ignore parameter. + + Patterns is a sequence of glob-style patterns + that are used to exclude files""" + def _ignore_patterns(path, names): + ignored_names = [] + for pattern in patterns: + ignored_names.extend(fnmatch.filter(names, pattern)) + return set(ignored_names) + return _ignore_patterns + +def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, + ignore_dangling_symlinks=False): + """Recursively copy a directory tree. + + The destination directory must not already exist. + If exception(s) occur, an Error is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. If the file pointed by the symlink doesn't + exist, an exception will be added in the list of errors raised in + an Error exception at the end of the copy process. + + You can set the optional ignore_dangling_symlinks flag to true if you + want to silence this exception. Notice that this has no effect on + platforms that don't support os.symlink. + + The optional ignore argument is a callable. If given, it + is called with the `src` parameter, which is the directory + being visited by copytree(), and `names` which is the list of + `src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + + Since copytree() is called recursively, the callable will be + called once for each directory that is copied. 
It returns a + list of names relative to the `src` directory that should + not be copied. + + The optional copy_function argument is a callable that will be used + to copy each file. It will be called with the source path and the + destination path as arguments. By default, copy2() is used, but any + function that supports the same signature (like copy()) can be used. + + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + os.makedirs(dst) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + if symlinks: + os.symlink(linkto, dstname) + else: + # ignore dangling symlink if the flag is on + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + # otherwise let the copy occurs. copy2 will raise an error + copy_function(srcname, dstname) + elif os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore, copy_function) + else: + # Will raise a SpecialFileError for unsupported file types + copy_function(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except Error as err: + errors.extend(err.args[0]) + except EnvironmentError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + if WindowsError is not None and isinstance(why, WindowsError): + # Copying file access times may fail on Windows + pass + else: + errors.extend((src, dst, str(why))) + if errors: + raise Error(errors) + +def rmtree(path, ignore_errors=False, onerror=None): + """Recursively delete a directory tree. + + If ignore_errors is set, errors are ignored; otherwise, if onerror + is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If ignore_errors + is false and onerror is None, an exception is raised. + + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + try: + if os.path.islink(path): + # symlinks to directories are forbidden, see bug #1669 + raise OSError("Cannot call rmtree on a symbolic link") + except OSError: + onerror(os.path.islink, path, sys.exc_info()) + # can't continue even if onerror hook returns + return + names = [] + try: + names = os.listdir(path) + except os.error: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + + +def _basename(path): + # A basename() variant which first strips the trailing slash, if present. + # Thus we always get the last component of the path, even for directories. + return os.path.basename(path.rstrip(os.path.sep)) + +def move(src, dst): + """Recursively move a file or directory to another location. This is + similar to the Unix "mv" command. + + If the destination is a directory or a symlink to a directory, the source + is moved inside the directory. 
The destination path must not already + exist. + + If the destination already exists but is not a directory, it may be + overwritten depending on os.rename() semantics. + + If the destination is on our current filesystem, then rename() is used. + Otherwise, src is copied to the destination and then removed. + A lot more could be done here... A look at a mv.c shows a lot of + the issues this implementation glosses over. + + """ + real_dst = dst + if os.path.isdir(dst): + if _samefile(src, dst): + # We might be on a case insensitive filesystem, + # perform the rename anyway. + os.rename(src, dst) + return + + real_dst = os.path.join(dst, _basename(src)) + if os.path.exists(real_dst): + raise Error("Destination path '%s' already exists" % real_dst) + try: + os.rename(src, real_dst) + except OSError: + if os.path.isdir(src): + if _destinsrc(src, dst): + raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) + copytree(src, real_dst, symlinks=True) + rmtree(src) + else: + copy2(src, real_dst) + os.unlink(src) + +def _destinsrc(src, dst): + src = abspath(src) + dst = abspath(dst) + if not src.endswith(os.path.sep): + src += os.path.sep + if not dst.endswith(os.path.sep): + dst += os.path.sep + return dst.startswith(src) + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None, logger=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", or None. + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_name' + ".tar", possibly plus + the appropriate compression extension (".gz", or ".bz2"). + + Returns the output filename. 
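+
+    For example (paths are illustrative), the call
+
+        _make_tarball('dist/pkg-1.0', 'pkg-1.0', compress='gzip')
+
+    would return 'dist/pkg-1.0.tar.gz'.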
+ """ + tar_compression = {'gzip': 'gz', None: ''} + compress_ext = {'gzip': '.gz'} + + if _BZ2_SUPPORTED: + tar_compression['bzip2'] = 'bz2' + compress_ext['bzip2'] = '.bz2' + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext: + raise ValueError("bad value for 'compress', or compression format not " + "supported : {0}".format(compress)) + + archive_name = base_name + '.tar' + compress_ext.get(compress, '') + archive_dir = os.path.dirname(archive_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # creating the tarball + if logger is not None: + logger.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + return archive_name + +def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): + # XXX see if we want to keep an external call here + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + from distutils.errors import DistutilsExecError + from distutils.spawn import spawn + try: + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". + raise ExecError("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename + +def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): + """Create a zip file from all the files under 'base_dir'. + + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises ExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + archive_dir = os.path.dirname(base_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # If zipfile module is not available, try spawning an external 'zip' + # command. 
+ try: + import zipfile + except ImportError: + zipfile = None + + if zipfile is None: + _call_external_zip(base_dir, zip_filename, verbose, dry_run) + else: + if logger is not None: + logger.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) + zip.close() + + return zip_filename + +_ARCHIVE_FORMATS = { + 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (_make_zipfile, [], "ZIP file"), + } + +if _BZ2_SUPPORTED: + _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], + "bzip2'ed tar-file") + +def get_archive_formats(): + """Returns a list of supported formats for archiving and unarchiving. + + Each element of the returned sequence is a tuple (name, description) + """ + formats = [(name, registry[2]) for name, registry in + _ARCHIVE_FORMATS.items()] + formats.sort() + return formats + +def register_archive_format(name, function, extra_args=None, description=''): + """Registers an archive format. + + name is the name of the format. function is the callable that will be + used to create archives. If provided, extra_args is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_archive_formats() function. + """ + if extra_args is None: + extra_args = [] + if not isinstance(function, Callable): + raise TypeError('The %s object is not callable' % function) + if not isinstance(extra_args, (tuple, list)): + raise TypeError('extra_args needs to be a sequence') + for element in extra_args: + if not isinstance(element, (tuple, list)) or len(element) !=2: + raise TypeError('extra_args elements are : (arg_name, value)') + + _ARCHIVE_FORMATS[name] = (function, extra_args, description) + +def unregister_archive_format(name): + del _ARCHIVE_FORMATS[name] + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None, logger=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "bztar" + or "gztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. + + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. 
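+
+    For example (paths are illustrative), archiving a 'build' tree into
+    '/tmp/release.tar.gz' could look like:
+
+        make_archive('/tmp/release', 'gztar', root_dir='build')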
+ """ + save_cwd = os.getcwd() + if root_dir is not None: + if logger is not None: + logger.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run, 'logger': logger} + + try: + format_info = _ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + if logger is not None: + logger.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename + + +def get_unpack_formats(): + """Returns a list of supported formats for unpacking. + + Each element of the returned sequence is a tuple + (name, extensions, description) + """ + formats = [(name, info[0], info[3]) for name, info in + _UNPACK_FORMATS.items()] + formats.sort() + return formats + +def _check_unpack_options(extensions, function, extra_args): + """Checks what gets registered as an unpacker.""" + # first make sure no other unpacker is registered for this extension + existing_extensions = {} + for name, info in _UNPACK_FORMATS.items(): + for ext in info[0]: + existing_extensions[ext] = name + + for extension in extensions: + if extension in existing_extensions: + msg = '%s is already registered for "%s"' + raise RegistryError(msg % (extension, + existing_extensions[extension])) + + if not isinstance(function, Callable): + raise TypeError('The registered function must be a callable') + + +def register_unpack_format(name, extensions, function, extra_args=None, + description=''): + """Registers an unpack format. + + `name` is the name of the format. `extensions` is a list of extensions + corresponding to the format. + + `function` is the callable that will be + used to unpack archives. The callable will receive archives to unpack. + If it's unable to handle an archive, it needs to raise a ReadError + exception. + + If provided, `extra_args` is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_unpack_formats() function. + """ + if extra_args is None: + extra_args = [] + _check_unpack_options(extensions, function, extra_args) + _UNPACK_FORMATS[name] = extensions, function, extra_args, description + +def unregister_unpack_format(name): + """Removes the pack format from the registry.""" + del _UNPACK_FORMATS[name] + +def _ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def _unpack_zipfile(filename, extract_dir): + """Unpack zip `filename` to `extract_dir` + """ + try: + import zipfile + except ImportError: + raise ReadError('zlib not supported, cannot unpack this archive.') + + if not zipfile.is_zipfile(filename): + raise ReadError("%s is not a zip file" % filename) + + zip = zipfile.ZipFile(filename) + try: + for info in zip.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' 
in name: + continue + + target = os.path.join(extract_dir, *name.split('/')) + if not target: + continue + + _ensure_directory(target) + if not name.endswith('/'): + # file + data = zip.read(info.filename) + f = open(target, 'wb') + try: + f.write(data) + finally: + f.close() + del data + finally: + zip.close() + +def _unpack_tarfile(filename, extract_dir): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise ReadError( + "%s is not a compressed or uncompressed tar file" % filename) + try: + tarobj.extractall(extract_dir) + finally: + tarobj.close() + +_UNPACK_FORMATS = { + 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), + 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), + 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") + } + +if _BZ2_SUPPORTED: + _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], + "bzip2'ed tar-file") + +def _find_unpack_format(filename): + for name, info in _UNPACK_FORMATS.items(): + for extension in info[0]: + if filename.endswith(extension): + return name + return None + +def unpack_archive(filename, extract_dir=None, format=None): + """Unpack an archive. + + `filename` is the name of the archive. + + `extract_dir` is the name of the target directory, where the archive + is unpacked. If not provided, the current working directory is used. + + `format` is the archive format: one of "zip", "tar", or "gztar". Or any + other registered format. If not provided, unpack_archive will use the + filename extension and see if an unpacker was registered for that + extension. + + In case none is found, a ValueError is raised. + """ + if extract_dir is None: + extract_dir = os.getcwd() + + if format is not None: + try: + format_info = _UNPACK_FORMATS[format] + except KeyError: + raise ValueError("Unknown unpack format '{0}'".format(format)) + + func = format_info[1] + func(filename, extract_dir, **dict(format_info[2])) + else: + # we need to look at the registered unpackers supported extensions + format = _find_unpack_format(filename) + if format is None: + raise ReadError("Unknown archive format '{0}'".format(filename)) + + func = _UNPACK_FORMATS[format][1] + kwargs = dict(_UNPACK_FORMATS[format][2]) + func(filename, extract_dir, **kwargs) diff --git a/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.cfg b/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.cfg new file mode 100644 index 0000000000..1746bd01c1 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.cfg @@ -0,0 +1,84 @@ +[posix_prefix] +# Configuration directories. Some of these come straight out of the +# configure script. They are for implementing the other variables, not to +# be used directly in [resource_locations]. 
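+# For illustration (hypothetical values): with base=/usr and
+# py_version_short=3.9, the template {base}/lib/python{py_version_short}
+# expands to /usr/lib/python3.9.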
+confdir = /etc +datadir = /usr/share +libdir = /usr/lib +statedir = /var +# User resource directory +local = ~/.local/{distribution.name} + +stdlib = {base}/lib/python{py_version_short} +platstdlib = {platbase}/lib/python{py_version_short} +purelib = {base}/lib/python{py_version_short}/site-packages +platlib = {platbase}/lib/python{py_version_short}/site-packages +include = {base}/include/python{py_version_short}{abiflags} +platinclude = {platbase}/include/python{py_version_short}{abiflags} +data = {base} + +[posix_home] +stdlib = {base}/lib/python +platstdlib = {base}/lib/python +purelib = {base}/lib/python +platlib = {base}/lib/python +include = {base}/include/python +platinclude = {base}/include/python +scripts = {base}/bin +data = {base} + +[nt] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2_home] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[nt_user] +stdlib = {userbase}/Python{py_version_nodot} +platstdlib = {userbase}/Python{py_version_nodot} +purelib = {userbase}/Python{py_version_nodot}/site-packages +platlib = {userbase}/Python{py_version_nodot}/site-packages +include = {userbase}/Python{py_version_nodot}/Include +scripts = {userbase}/Scripts +data = {userbase} + +[posix_user] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[osx_framework_user] +stdlib = {userbase}/lib/python +platstdlib = {userbase}/lib/python +purelib = {userbase}/lib/python/site-packages +platlib = {userbase}/lib/python/site-packages +include = {userbase}/include +scripts = {userbase}/bin +data = {userbase} diff --git a/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py b/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py new file mode 100644 index 0000000000..b470a373c8 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib/_backport/sysconfig.py @@ -0,0 +1,786 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +"""Access to Python's configuration information.""" + +import codecs +import os +import re +import sys +from os.path import pardir, realpath +try: + import configparser +except ImportError: + import ConfigParser as configparser + + +__all__ = [ + 'get_config_h_filename', + 'get_config_var', + 'get_config_vars', + 'get_makefile_filename', + 'get_path', + 'get_path_names', + 'get_paths', + 'get_platform', + 'get_python_version', + 'get_scheme_names', + 'parse_config_h', +] + + +def _safe_realpath(path): + try: + return realpath(path) + except OSError: + return path + + +if sys.executable: + _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) +else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + _PROJECT_BASE = _safe_realpath(os.getcwd()) + +if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) +# PC/VS7.1 +if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) +# PC/AMD64 +if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) + + +def is_python_build(): + for fn in ("Setup.dist", "Setup.local"): + if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): + return True + return False + +_PYTHON_BUILD = is_python_build() + +_cfg_read = False + +def _ensure_cfg_read(): + global _cfg_read + if not _cfg_read: + from ..resources import finder + backport_package = __name__.rsplit('.', 1)[0] + _finder = finder(backport_package) + _cfgfile = _finder.find('sysconfig.cfg') + assert _cfgfile, 'sysconfig.cfg exists' + with _cfgfile.as_stream() as s: + _SCHEMES.readfp(s) + if _PYTHON_BUILD: + for scheme in ('posix_prefix', 'posix_home'): + _SCHEMES.set(scheme, 'include', '{srcdir}/Include') + _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') + + _cfg_read = True + + +_SCHEMES = configparser.RawConfigParser() +_VAR_REPL = re.compile(r'\{([^{]*?)\}') + +def _expand_globals(config): + _ensure_cfg_read() + if config.has_section('globals'): + globals = config.items('globals') + else: + globals = tuple() + + sections = config.sections() + for section in sections: + if section == 'globals': + continue + for option, value in globals: + if config.has_option(section, option): + continue + config.set(section, option, value) + config.remove_section('globals') + + # now expanding local variables defined in the cfg file + # + for section in config.sections(): + variables = dict(config.items(section)) + + def _replacer(matchobj): + name = matchobj.group(1) + if name in variables: + return variables[name] + return matchobj.group(0) + + for option, value in config.items(section): + config.set(section, option, _VAR_REPL.sub(_replacer, value)) + +#_expand_globals(_SCHEMES) + +_PY_VERSION = '%s.%s.%s' % sys.version_info[:3] +_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2] +_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2] +_PREFIX = os.path.normpath(sys.prefix) +_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +_CONFIG_VARS = None +_USER_BASE = None + + +def _subst_vars(path, local_vars): + """In the string `path`, replace tokens like {some.thing} with the + corresponding value from the map `local_vars`. + + If there is no corresponding value, leave the token unchanged. 
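+
+    For example (hypothetical values):
+
+        _subst_vars('{base}/lib/python', {'base': '/usr'})
+
+    returns '/usr/lib/python'.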
+ """ + def _replacer(matchobj): + name = matchobj.group(1) + if name in local_vars: + return local_vars[name] + elif name in os.environ: + return os.environ[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, path) + + +def _extend_dict(target_dict, other_dict): + target_keys = target_dict.keys() + for key, value in other_dict.items(): + if key in target_keys: + continue + target_dict[key] = value + + +def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} + _extend_dict(vars, get_config_vars()) + + for key, value in _SCHEMES.items(scheme): + if os.name in ('posix', 'nt'): + value = os.path.expanduser(value) + res[key] = os.path.normpath(_subst_vars(value, vars)) + return res + + +def format_value(value, vars): + def _replacer(matchobj): + name = matchobj.group(1) + if name in vars: + return vars[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, value) + + +def _get_default_scheme(): + if os.name == 'posix': + # the default scheme for posix is posix_prefix + return 'posix_prefix' + return os.name + + +def _getuserbase(): + env_base = os.environ.get("PYTHONUSERBASE", None) + + def joinuser(*args): + return os.path.expanduser(os.path.join(*args)) + + # what about 'os2emx', 'riscos' ? + if os.name == "nt": + base = os.environ.get("APPDATA") or "~" + if env_base: + return env_base + else: + return joinuser(base, "Python") + + if sys.platform == "darwin": + framework = get_config_var("PYTHONFRAMEWORK") + if framework: + if env_base: + return env_base + else: + return joinuser("~", "Library", framework, "%d.%d" % + sys.version_info[:2]) + + if env_base: + return env_base + else: + return joinuser("~", ".local") + + +def _parse_makefile(filename, vars=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + # Regexes needed for parsing Makefile (and similar syntaxes, + # like old-style Setup files). + _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") + _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") + _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + if vars is None: + vars = {} + done = {} + notdone = {} + + with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: + lines = f.readlines() + + for line in lines: + if line.startswith('#') or line.strip() == '': + continue + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # do variable interpolation here + variables = list(notdone.keys()) + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. 
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + while len(variables) > 0: + for name in tuple(variables): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m is not None: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if (name.startswith('PY_') and + name[3:] in renamed_variables): + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + + else: + done[n] = item = "" + + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: + value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + variables.remove(name) + + if (name.startswith('PY_') and + name[3:] in renamed_variables): + + name = name[3:] + if name not in done: + done[name] = value + + else: + # bogus variable reference (e.g. "prefix=$/opt/python"); + # just drop it since we can't deal + done[name] = value + variables.remove(name) + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + vars.update(done) + return vars + + +def get_makefile_filename(): + """Return the path of the Makefile.""" + if _PYTHON_BUILD: + return os.path.join(_PROJECT_BASE, "Makefile") + if hasattr(sys, 'abiflags'): + config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) + else: + config_dir_name = 'config' + return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" + # load the installed Makefile: + makefile = get_makefile_filename() + try: + _parse_makefile(makefile, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % makefile + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # load the installed pyconfig.h: + config_h = get_config_h_filename() + try: + with open(config_h) as f: + parse_config_h(f, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % config_h + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # On AIX, there are wrong paths to the linker scripts in the Makefile + # -- these paths are relative to the Python source, but when installed + # the scripts are in another directory. + if _PYTHON_BUILD: + vars['LDSHARED'] = vars['BLDSHARED'] + + +def _init_non_posix(vars): + """Initialize the module as appropriate for NT""" + # set basic install directories + vars['LIBDEST'] = get_path('stdlib') + vars['BINLIBDEST'] = get_path('platstdlib') + vars['INCLUDEPY'] = get_path('include') + vars['SO'] = '.pyd' + vars['EXE'] = '.exe' + vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT + vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) + +# +# public APIs +# + + +def parse_config_h(fp, vars=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. 
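+
+    A typical use is a sketch like the following ('SIZEOF_LONG' is just an
+    example of a variable defined in pyconfig.h):
+
+        with open(get_config_h_filename()) as f:
+            config_vars = parse_config_h(f)
+        size_of_long = config_vars.get('SIZEOF_LONG')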
+ """ + if vars is None: + vars = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: + v = int(v) + except ValueError: + pass + vars[n] = v + else: + m = undef_rx.match(line) + if m: + vars[m.group(1)] = 0 + return vars + + +def get_config_h_filename(): + """Return the path of pyconfig.h.""" + if _PYTHON_BUILD: + if os.name == "nt": + inc_dir = os.path.join(_PROJECT_BASE, "PC") + else: + inc_dir = _PROJECT_BASE + else: + inc_dir = get_path('platinclude') + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_scheme_names(): + """Return a tuple containing the schemes names.""" + return tuple(sorted(_SCHEMES.sections())) + + +def get_path_names(): + """Return a tuple containing the paths names.""" + # xxx see if we want a static list + return _SCHEMES.options('posix_prefix') + + +def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): + """Return a mapping containing an install scheme. + + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. + """ + _ensure_cfg_read() + if expand: + return _expand_vars(scheme, vars) + else: + return dict(_SCHEMES.items(scheme)) + + +def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): + """Return a path corresponding to the scheme. + + ``scheme`` is the install scheme name. + """ + return get_paths(scheme, vars, expand)[name] + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows and Mac OS it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _CONFIG_VARS + if _CONFIG_VARS is None: + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # distutils2 module. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + + if os.name in ('nt', 'os2'): + _init_non_posix(_CONFIG_VARS) + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. + if sys.version >= '2.6': + _CONFIG_VARS['userbase'] = _getuserbase() + + if 'srcdir' not in _CONFIG_VARS: + _CONFIG_VARS['srcdir'] = _PROJECT_BASE + else: + _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. 
+ if _PYTHON_BUILD and os.name == "posix": + base = _PROJECT_BASE + try: + cwd = os.getcwd() + except OSError: + cwd = None + if (not os.path.isabs(_CONFIG_VARS['srcdir']) and + base != cwd): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) + _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) + + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On Mac OS X before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]*', ' ', flags) + _CONFIG_VARS[key] = flags + else: + # Allow the user to override the architecture flags using + # an environment variable. + # NOTE: This name was introduced by Apple in OSX 10.5 and + # is used by several scripting languages distributed with + # that OS release. + if 'ARCHFLAGS' in os.environ: + arch = os.environ['ARCHFLAGS'] + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-arch\s+\w+\s', ' ', flags) + flags = flags + ' ' + arch + _CONFIG_VARS[key] = flags + + # If we're on OSX 10.5 or later and the user tries to + # compiles an extension using an SDK that is not present + # on the current machine it is better to not use an SDK + # than to fail. + # + # The major usecase for this is users using a Python.org + # binary installer on OSX 10.6: that installer uses + # the 10.4u SDK, but that SDK is not installed by default + # when you install Xcode. + # + CFLAGS = _CONFIG_VARS.get('CFLAGS', '') + m = re.search(r'-isysroot\s+(\S+)', CFLAGS) + if m is not None: + sdk = m.group(1) + if not os.path.exists(sdk): + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags) + _CONFIG_VARS[key] = flags + + if args: + vals = [] + for name in args: + vals.append(_CONFIG_VARS.get(name)) + return vals + else: + return _CONFIG_VARS + + +def get_config_var(name): + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ + return get_config_vars().get(name) + + +def get_platform(): + """Return a string that identifies the current platform. + + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name + and version and the architecture (as supplied by 'os.uname()'), + although the exact information included depends on the OS; eg. for IRIX + the architecture isn't particularly important (IRIX only runs on SGI + hardware), but for Linux the kernel version isn't particularly + important. + + Examples of returned values: + linux-i586 + linux-alpha (?) 
+       solaris-2.6-sun4u
+       irix-5.3
+       irix64-6.2
+
+    Windows will return one of:
+       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+       win-ia64 (64bit Windows on Itanium)
+       win32 (all others - specifically, sys.platform is returned)
+
+    For other non-POSIX platforms, currently just returns 'sys.platform'.
+    """
+    if os.name == 'nt':
+        # sniff sys.version for architecture.
+        prefix = " bit ("
+        i = sys.version.find(prefix)
+        if i == -1:
+            return sys.platform
+        j = sys.version.find(")", i)
+        look = sys.version[i+len(prefix):j].lower()
+        if look == 'amd64':
+            return 'win-amd64'
+        if look == 'itanium':
+            return 'win-ia64'
+        return sys.platform
+
+    if os.name != "posix" or not hasattr(os, 'uname'):
+        # XXX what about the architecture? NT is Intel or Alpha,
+        # Mac OS is M68k or PPC, etc.
+        return sys.platform
+
+    # Try to distinguish various flavours of Unix
+    osname, host, release, version, machine = os.uname()
+
+    # Convert the OS name to lowercase, remove '/' characters
+    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
+    osname = osname.lower().replace('/', '')
+    machine = machine.replace(' ', '_')
+    machine = machine.replace('/', '-')
+
+    if osname[:5] == "linux":
+        # At least on Linux/Intel, 'machine' is the processor --
+        # i386, etc.
+        # XXX what about Alpha, SPARC, etc?
+        return "%s-%s" % (osname, machine)
+    elif osname[:5] == "sunos":
+        if release[0] >= "5":           # SunOS 5 == Solaris 2
+            osname = "solaris"
+            release = "%d.%s" % (int(release[0]) - 3, release[2:])
+        # fall through to standard osname-release-machine representation
+    elif osname[:4] == "irix":          # could be "irix64"!
+        return "%s-%s" % (osname, release)
+    elif osname[:3] == "aix":
+        return "%s-%s.%s" % (osname, version, release)
+    elif osname[:6] == "cygwin":
+        osname = "cygwin"
+        rel_re = re.compile(r'[\d.]+')
+        m = rel_re.match(release)
+        if m:
+            release = m.group()
+    elif osname[:6] == "darwin":
+        #
+        # For our purposes, we'll assume that the system version from
+        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
+        # to. This makes the compatibility story a bit more sane because the
+        # machine is going to compile and link as if it were
+        # MACOSX_DEPLOYMENT_TARGET.
+        cfgvars = get_config_vars()
+        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
+
+        if True:
+            # Always calculate the release of the running machine,
+            # needed to determine if we can build fat binaries or not.
+
+            macrelease = macver
+            # Get the system version. Reading this plist is a documented
+            # way to get the system version (see the documentation for
+            # the Gestalt Manager)
+            try:
+                f = open('/System/Library/CoreServices/SystemVersion.plist')
+            except IOError:
+                # We're on a plain darwin box, fall back to the default
+                # behaviour.
+                pass
+            else:
+                try:
+                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
+                                  r'<string>(.*?)</string>', f.read())
+                finally:
+                    f.close()
+                if m is not None:
+                    macrelease = '.'.join(m.group(1).split('.')[:2])
+                # else: fall back to the default behaviour
+
+        if not macver:
+            macver = macrelease
+
+        if macver:
+            release = macver
+            osname = "macosx"
+
+            if ((macrelease + '.') >= '10.4.' and
+                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
+                # The universal build will build fat binaries, but not on
+                # systems before 10.4
+                #
+                # Try to detect 4-way universal builds, those have machine-type
+                # 'universal' instead of 'fat'.
+ + machine = 'fat' + cflags = get_config_vars().get('CFLAGS') + + archs = re.findall(r'-arch\s+(\S+)', cflags) + archs = tuple(sorted(set(archs))) + + if len(archs) == 1: + machine = archs[0] + elif archs == ('i386', 'ppc'): + machine = 'fat' + elif archs == ('i386', 'x86_64'): + machine = 'intel' + elif archs == ('i386', 'ppc', 'x86_64'): + machine = 'fat3' + elif archs == ('ppc64', 'x86_64'): + machine = 'fat64' + elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): + machine = 'universal' + else: + raise ValueError( + "Don't know machine value for archs=%r" % (archs,)) + + elif machine == 'i386': + # On OSX the machine type returned by uname is always the + # 32-bit variant, even if the executable architecture is + # the 64-bit variant + if sys.maxsize >= 2**32: + machine = 'x86_64' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + # See 'i386' case + if sys.maxsize >= 2**32: + machine = 'ppc64' + else: + machine = 'ppc' + + return "%s-%s-%s" % (osname, release, machine) + + +def get_python_version(): + return _PY_VERSION_SHORT + + +def _print_dict(title, data): + for index, (key, value) in enumerate(sorted(data.items())): + if index == 0: + print('%s: ' % (title)) + print('\t%s = "%s"' % (key, value)) + + +def _main(): + """Display all information sysconfig detains.""" + print('Platform: "%s"' % get_platform()) + print('Python version: "%s"' % get_python_version()) + print('Current installation scheme: "%s"' % _get_default_scheme()) + print() + _print_dict('Paths', get_paths()) + print() + _print_dict('Variables', get_config_vars()) + + +if __name__ == '__main__': + _main() diff --git a/pipenv/patched/notpip/_vendor/distlib/_backport/tarfile.py b/pipenv/patched/notpip/_vendor/distlib/_backport/tarfile.py new file mode 100644 index 0000000000..d66d856637 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distlib/_backport/tarfile.py @@ -0,0 +1,2607 @@ +#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gustaebel +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +from __future__ import print_function + +"""Read from and write to tar format archives. 
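+
+For example (a sketch; 'sample.tar.gz' is a hypothetical path):
+
+    tar = TarFile.open('sample.tar.gz', 'r:gz')
+    try:
+        tar.extractall('.')
+    finally:
+        tar.close()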
+""" + +__version__ = "$Revision$" + +version = "0.9.0" +__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" +__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" +__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" +__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." + +#--------- +# Imports +#--------- +import sys +import os +import stat +import errno +import time +import struct +import copy +import re + +try: + import grp, pwd +except ImportError: + grp = pwd = None + +# os.symlink on Windows prior to 6.0 raises NotImplementedError +symlink_exception = (AttributeError, NotImplementedError) +try: + # WindowsError (1314) will be raised if the caller does not hold the + # SeCreateSymbolicLinkPrivilege privilege + symlink_exception += (WindowsError,) +except NameError: + pass + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] + +if sys.version_info[0] < 3: + import __builtin__ as builtins +else: + import builtins + +_open = builtins.open # Since 'open' is TarFile.open + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = b"\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +GNU_MAGIC = b"ustar \0" # magic gnu tar string +POSIX_MAGIC = b"ustar\x0000" # magic posix tar string + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field + +REGTYPE = b"0" # regular file +AREGTYPE = b"\0" # regular file +LNKTYPE = b"1" # link (inside tarfile) +SYMTYPE = b"2" # symbolic link +CHRTYPE = b"3" # character special device +BLKTYPE = b"4" # block special device +DIRTYPE = b"5" # directory +FIFOTYPE = b"6" # fifo special device +CONTTYPE = b"7" # contiguous file + +GNUTYPE_LONGNAME = b"L" # GNU tar longname +GNUTYPE_LONGLINK = b"K" # GNU tar longlink +GNUTYPE_SPARSE = b"S" # GNU tar sparse file + +XHDTYPE = b"x" # POSIX.1-2001 extended header +XGLTYPE = b"g" # POSIX.1-2001 global header +SOLARIS_XHDTYPE = b"X" # Solaris extended header + +USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format +GNU_FORMAT = 1 # GNU tar format +PAX_FORMAT = 2 # POSIX.1-2001 (pax) format +DEFAULT_FORMAT = GNU_FORMAT + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +# File types that tarfile supports: +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, + SYMTYPE, DIRTYPE, FIFOTYPE, + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# File types that will be treated as a regular file. +REGULAR_TYPES = (REGTYPE, AREGTYPE, + CONTTYPE, GNUTYPE_SPARSE) + +# File types that are part of the GNU tar format. +GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# Fields from a pax header that override a TarInfo attribute. +PAX_FIELDS = ("path", "linkpath", "size", "mtime", + "uid", "gid", "uname", "gname") + +# Fields from a pax header that are affected by hdrcharset. +PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) + +# Fields in a pax header that are numbers, all other fields +# are treated as strings. 
+PAX_NUMBER_FIELDS = { + "atime": float, + "ctime": float, + "mtime": float, + "uid": int, + "gid": int, + "size": int +} + +#--------------------------------------------------------- +# Bits used in the mode field, values in octal. +#--------------------------------------------------------- +S_IFLNK = 0o120000 # symbolic link +S_IFREG = 0o100000 # regular file +S_IFBLK = 0o060000 # block device +S_IFDIR = 0o040000 # directory +S_IFCHR = 0o020000 # character device +S_IFIFO = 0o010000 # fifo + +TSUID = 0o4000 # set UID on execution +TSGID = 0o2000 # set GID on execution +TSVTX = 0o1000 # reserved + +TUREAD = 0o400 # read by owner +TUWRITE = 0o200 # write by owner +TUEXEC = 0o100 # execute/search by owner +TGREAD = 0o040 # read by group +TGWRITE = 0o020 # write by group +TGEXEC = 0o010 # execute/search by group +TOREAD = 0o004 # read by other +TOWRITE = 0o002 # write by other +TOEXEC = 0o001 # execute/search by other + +#--------------------------------------------------------- +# initialization +#--------------------------------------------------------- +if os.name in ("nt", "ce"): + ENCODING = "utf-8" +else: + ENCODING = sys.getfilesystemencoding() + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length, encoding, errors): + """Convert a string to a null-terminated bytes object. + """ + s = s.encode(encoding, errors) + return s[:length] + (length - len(s)) * NUL + +def nts(s, encoding, errors): + """Convert a null-terminated bytes object to a string. + """ + p = s.find(b"\0") + if p != -1: + s = s[:p] + return s.decode(encoding, errors) + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0o200): + try: + n = int(nts(s, "ascii", "strict") or "0", 8) + except ValueError: + raise InvalidHeaderError("invalid header") + else: + n = 0 + for i in range(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, format=DEFAULT_FORMAT): + """Convert a python number to a number field. + """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0o200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL + else: + if format != GNU_FORMAT or n >= 256 ** (digits - 1): + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = bytearray() + for i in range(digits - 1): + s.insert(0, n & 0o377) + n >>= 8 + s.insert(0, 0o200) + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. 
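+
+       For example, given a 512-byte header block buf (sketch):
+
+           unsigned_chksum, signed_chksum = calc_chksums(buf)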
+ """ + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. + """ + if length == 0: + return + if length is None: + while True: + buf = src.read(16*1024) + if not buf: + break + dst.write(buf) + return + + BUFSIZE = 16 * 1024 + blocks, remainder = divmod(length, BUFSIZE) + for b in range(blocks): + buf = src.read(BUFSIZE) + if len(buf) < BUFSIZE: + raise IOError("end of file reached") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise IOError("end of file reached") + dst.write(buf) + return + +filemode_table = ( + ((S_IFLNK, "l"), + (S_IFREG, "-"), + (S_IFBLK, "b"), + (S_IFDIR, "d"), + (S_IFCHR, "c"), + (S_IFIFO, "p")), + + ((TUREAD, "r"),), + ((TUWRITE, "w"),), + ((TUEXEC|TSUID, "s"), + (TSUID, "S"), + (TUEXEC, "x")), + + ((TGREAD, "r"),), + ((TGWRITE, "w"),), + ((TGEXEC|TSGID, "s"), + (TSGID, "S"), + (TGEXEC, "x")), + + ((TOREAD, "r"),), + ((TOWRITE, "w"),), + ((TOEXEC|TSVTX, "t"), + (TSVTX, "T"), + (TOEXEC, "x")) +) + +def filemode(mode): + """Convert a file's mode to a string of the form + -rwxrwxrwx. + Used by TarFile.list() + """ + perm = [] + for table in filemode_table: + for bit, char in table: + if mode & bit == bit: + perm.append(char) + break + else: + perm.append("-") + return "".join(perm) + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadable tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass +class HeaderError(TarError): + """Base exception for header errors.""" + pass +class EmptyHeaderError(HeaderError): + """Exception for empty headers.""" + pass +class TruncatedHeaderError(HeaderError): + """Exception for truncated headers.""" + pass +class EOFHeaderError(HeaderError): + """Exception for end of file headers.""" + pass +class InvalidHeaderError(HeaderError): + """Exception for invalid headers.""" + pass +class SubsequentHeaderError(HeaderError): + """Exception for missing and invalid extended headers.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile(object): + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode, 0o666) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream(object): + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method and is accessed + blockwise. Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. 
+
+       _Stream is intended to be used only internally.
+    """
+
+    def __init__(self, name, mode, comptype, fileobj, bufsize):
+        """Construct a _Stream object.
+        """
+        self._extfileobj = True
+        if fileobj is None:
+            fileobj = _LowLevelFile(name, mode)
+            self._extfileobj = False
+
+        if comptype == '*':
+            # Enable transparent compression detection for the
+            # stream interface
+            fileobj = _StreamProxy(fileobj)
+            comptype = fileobj.getcomptype()
+
+        self.name = name or ""
+        self.mode = mode
+        self.comptype = comptype
+        self.fileobj = fileobj
+        self.bufsize = bufsize
+        self.buf = b""
+        self.pos = 0
+        self.closed = False
+
+        try:
+            if comptype == "gz":
+                try:
+                    import zlib
+                except ImportError:
+                    raise CompressionError("zlib module is not available")
+                self.zlib = zlib
+                self.crc = zlib.crc32(b"")
+                if mode == "r":
+                    self._init_read_gz()
+                else:
+                    self._init_write_gz()
+
+            if comptype == "bz2":
+                try:
+                    import bz2
+                except ImportError:
+                    raise CompressionError("bz2 module is not available")
+                if mode == "r":
+                    self.dbuf = b""
+                    self.cmp = bz2.BZ2Decompressor()
+                else:
+                    self.cmp = bz2.BZ2Compressor()
+        except:
+            if not self._extfileobj:
+                self.fileobj.close()
+            self.closed = True
+            raise
+
+    def __del__(self):
+        if hasattr(self, "closed") and not self.closed:
+            self.close()
+
+    def _init_write_gz(self):
+        """Initialize for writing with gzip compression.
+        """
+        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
+                                         -self.zlib.MAX_WBITS,
+                                         self.zlib.DEF_MEM_LEVEL,
+                                         0)
+        timestamp = struct.pack("<L", int(time.time()))
+        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+        if self.name.endswith(".gz"):
+            self.name = self.name[:-3]
+        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+    def write(self, s):
+        """Write string s to the stream.
+        """
+        if self.comptype == "gz":
+            self.crc = self.zlib.crc32(s, self.crc)
+        self.pos += len(s)
+        if self.comptype != "tar":
+            s = self.cmp.compress(s)
+        self.__write(s)
+
+    def __write(self, s):
+        """Write string s to the stream if a whole new block
+           is ready to be written.
+        """
+        self.buf += s
+        while len(self.buf) > self.bufsize:
+            self.fileobj.write(self.buf[:self.bufsize])
+            self.buf = self.buf[self.bufsize:]
+
+    def close(self):
+        """Close the _Stream object. No operation should be
+           done on it afterwards.
+        """
+        if self.closed:
+            return
+
+        if self.mode == "w" and self.comptype != "tar":
+            self.buf += self.cmp.flush()
+
+        if self.mode == "w" and self.buf:
+            self.fileobj.write(self.buf)
+            self.buf = b""
+            if self.comptype == "gz":
+                # The native zlib crc is an unsigned 32-bit integer, but
+                # the Python wrapper implicitly casts that to a signed C
+                # long. So, on a 32-bit box self.crc may "look negative",
+                # while the same crc on a 64-bit box may "look positive".
+                # To avoid irksome warnings from the `struct` module, force
+                # it to look positive on all boxes.
+                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
+                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+
+        if not self._extfileobj:
+            self.fileobj.close()
+
+        self.closed = True
+
+    def _init_read_gz(self):
+        """Initialize for reading a gzip compressed fileobj.
+        """
+        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+        self.dbuf = b""
+
+        # taken from gzip.GzipFile with some alterations
+        if self.__read(2) != b"\037\213":
+            raise ReadError("not a gzip file")
+        if self.__read(1) != b"\010":
+            raise CompressionError("unsupported compression method")
+
+        flag = ord(self.__read(1))
+        self.__read(6)
+
+        if flag & 4:
+            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+            self.read(xlen)
+        if flag & 8:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 16:
+            while True:
+                s = self.__read(1)
+                if not s or s == NUL:
+                    break
+        if flag & 2:
+            self.__read(2)
+
+    def tell(self):
+        """Return the stream's file pointer position.
+        """
+        return self.pos
+
+    def seek(self, pos=0):
+        """Set the stream's file pointer to pos. Negative seeking
+           is forbidden.
+        """
+        if pos - self.pos >= 0:
+            blocks, remainder = divmod(pos - self.pos, self.bufsize)
+            for i in range(blocks):
+                self.read(self.bufsize)
+            self.read(remainder)
+        else:
+            raise StreamError("seeking backwards is not allowed")
+        return self.pos
+
+    def read(self, size=None):
+        """Return the next size number of bytes from the stream.
+           If size is not defined, return all bytes of the stream
+           up to EOF.
+        """
+        if size is None:
+            t = []
+            while True:
+                buf = self._read(self.bufsize)
+                if not buf:
+                    break
+                t.append(buf)
+            # join the collected chunks as bytes, not str
+            buf = b"".join(t)
+        else:
+            buf = self._read(size)
+        self.pos += len(buf)
+        return buf
+
+    def _read(self, size):
+        """Return size bytes from the stream.
+        """
+        if self.comptype == "tar":
+            return self.__read(size)
+
+        c = len(self.dbuf)
+        while c < size:
+            buf = self.__read(self.bufsize)
+            if not buf:
+                break
+            try:
+                buf = self.cmp.decompress(buf)
+            except IOError:
+                raise ReadError("invalid compressed data")
+            self.dbuf += buf
+            c += len(buf)
+        buf = self.dbuf[:size]
+        self.dbuf = self.dbuf[size:]
+        return buf
+
+    def __read(self, size):
+        """Return size bytes from stream. If internal buffer is empty,
+           read another block from the stream.
+ """ + c = len(self.buf) + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + self.buf += buf + c += len(buf) + buf = self.buf[:size] + self.buf = self.buf[size:] + return buf +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith(b"\037\213\010"): + return "gz" + if self.buf.startswith(b"BZh91"): + return "bz2" + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. + """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.name = getattr(self.fileobj, "name", None) + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = b"" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + x = len(self.buf) + while x < size: + raw = self.fileobj.read(self.blocksize) + if not raw: + break + data = self.bz2obj.decompress(raw) + self.buf += data + x += len(data) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) +# class _BZ2Proxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, blockinfo=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.position = 0 + + if blockinfo is None: + blockinfo = [(0, size)] + + # Construct a map with data and zero blocks. + self.map_index = 0 + self.map = [] + lastpos = 0 + realpos = self.offset + for offset, size in blockinfo: + if offset > lastpos: + self.map.append((False, lastpos, offset, None)) + self.map.append((True, offset, offset + size, realpos)) + realpos += size + lastpos = offset + size + if lastpos < self.size: + self.map.append((False, lastpos, self.size, None)) + + def seekable(self): + if not hasattr(self.fileobj, "seekable"): + # XXX gzip.GzipFile and bz2.BZ2File + return True + return self.fileobj.seekable() + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position): + """Seek to a position in the file. + """ + self.position = position + + def read(self, size=None): + """Read data from the file. 
+ """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + buf = b"" + while size > 0: + while True: + data, start, stop, offset = self.map[self.map_index] + if start <= self.position < stop: + break + else: + self.map_index += 1 + if self.map_index == len(self.map): + self.map_index = 0 + length = min(size, stop - self.position) + if data: + self.fileobj.seek(offset + (self.position - start)) + buf += self.fileobj.read(length) + else: + buf += NUL * length + size -= length + self.position += length + return buf +#class _FileInFile + + +class ExFileObject(object): + """File-like object for reading an archive member. + Is returned by TarFile.extractfile(). + """ + blocksize = 1024 + + def __init__(self, tarfile, tarinfo): + self.fileobj = _FileInFile(tarfile.fileobj, + tarinfo.offset_data, + tarinfo.size, + tarinfo.sparse) + self.name = tarinfo.name + self.mode = "r" + self.closed = False + self.size = tarinfo.size + + self.position = 0 + self.buffer = b"" + + def readable(self): + return True + + def writable(self): + return False + + def seekable(self): + return self.fileobj.seekable() + + def read(self, size=None): + """Read at most size bytes from the file. If size is not + present or None, read all data until EOF is reached. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + buf = b"" + if self.buffer: + if size is None: + buf = self.buffer + self.buffer = b"" + else: + buf = self.buffer[:size] + self.buffer = self.buffer[size:] + + if size is None: + buf += self.fileobj.read() + else: + buf += self.fileobj.read(size - len(buf)) + + self.position += len(buf) + return buf + + # XXX TextIOWrapper uses the read1() method. + read1 = read + + def readline(self, size=-1): + """Read one entire line from the file. If size is present + and non-negative, return a string with at most that + size, which may be an incomplete line. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + while True: + buf = self.fileobj.read(self.blocksize) + self.buffer += buf + if not buf or b"\n" in buf: + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + pos = len(self.buffer) + break + + if size != -1: + pos = min(size, pos) + + buf = self.buffer[:pos] + self.buffer = self.buffer[pos:] + self.position += len(buf) + return buf + + def readlines(self): + """Return a list with all remaining lines. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def tell(self): + """Return the current file position. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + return self.position + + def seek(self, pos, whence=os.SEEK_SET): + """Seek to a position in the file. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if whence == os.SEEK_SET: + self.position = min(max(pos, 0), self.size) + elif whence == os.SEEK_CUR: + if pos < 0: + self.position = max(self.position + pos, 0) + else: + self.position = min(self.position + pos, self.size) + elif whence == os.SEEK_END: + self.position = max(min(self.size + pos, self.size), 0) + else: + raise ValueError("Invalid argument") + + self.buffer = b"" + self.fileobj.seek(self.position) + + def close(self): + """Close the file object. + """ + self.closed = True + + def __iter__(self): + """Get an iterator over the file's lines. 
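+           Lines are produced by successive readline() calls.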
+ """ + while True: + line = self.readline() + if not line: + break + yield line +#class ExFileObject + +#------------------ +# Exported Classes +#------------------ +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", + "chksum", "type", "linkname", "uname", "gname", + "devmajor", "devminor", + "offset", "offset_data", "pax_headers", "sparse", + "tarfile", "_sparse_structs", "_link_target") + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. + """ + self.name = name # member name + self.mode = 0o644 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "" # user name + self.gname = "" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + self.sparse = None # sparse member information + self.pax_headers = {} # pax header information + + # In pax headers the "name" and "linkname" field are called + # "path" and "linkpath". + def _getpath(self): + return self.name + def _setpath(self, name): + self.name = name + path = property(_getpath, _setpath) + + def _getlinkpath(self): + return self.linkname + def _setlinkpath(self, linkname): + self.linkname = linkname + linkpath = property(_getlinkpath, _setlinkpath) + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + def get_info(self): + """Return the TarInfo's attributes as a dictionary. + """ + info = { + "name": self.name, + "mode": self.mode & 0o7777, + "uid": self.uid, + "gid": self.gid, + "size": self.size, + "mtime": self.mtime, + "chksum": self.chksum, + "type": self.type, + "linkname": self.linkname, + "uname": self.uname, + "gname": self.gname, + "devmajor": self.devmajor, + "devminor": self.devminor + } + + if info["type"] == DIRTYPE and not info["name"].endswith("/"): + info["name"] += "/" + + return info + + def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): + """Return a tar header as a string of 512 byte blocks. + """ + info = self.get_info() + + if format == USTAR_FORMAT: + return self.create_ustar_header(info, encoding, errors) + elif format == GNU_FORMAT: + return self.create_gnu_header(info, encoding, errors) + elif format == PAX_FORMAT: + return self.create_pax_header(info, encoding) + else: + raise ValueError("invalid format") + + def create_ustar_header(self, info, encoding, errors): + """Return the object as a ustar header block. + """ + info["magic"] = POSIX_MAGIC + + if len(info["linkname"]) > LENGTH_LINK: + raise ValueError("linkname is too long") + + if len(info["name"]) > LENGTH_NAME: + info["prefix"], info["name"] = self._posix_split_name(info["name"]) + + return self._create_header(info, USTAR_FORMAT, encoding, errors) + + def create_gnu_header(self, info, encoding, errors): + """Return the object as a GNU header block sequence. 
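+           Overlong name/linkname values are written first as extra
+           GNUTYPE_LONGNAME/GNUTYPE_LONGLINK pseudo-member blocks.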
+ """ + info["magic"] = GNU_MAGIC + + buf = b"" + if len(info["linkname"]) > LENGTH_LINK: + buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) + + if len(info["name"]) > LENGTH_NAME: + buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) + + return buf + self._create_header(info, GNU_FORMAT, encoding, errors) + + def create_pax_header(self, info, encoding): + """Return the object as a ustar header block. If it cannot be + represented this way, prepend a pax extended header sequence + with supplement information. + """ + info["magic"] = POSIX_MAGIC + pax_headers = self.pax_headers.copy() + + # Test string fields for values that exceed the field length or cannot + # be represented in ASCII encoding. + for name, hname, length in ( + ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), + ("uname", "uname", 32), ("gname", "gname", 32)): + + if hname in pax_headers: + # The pax header has priority. + continue + + # Try to encode the string as ASCII. + try: + info[name].encode("ascii", "strict") + except UnicodeEncodeError: + pax_headers[hname] = info[name] + continue + + if len(info[name]) > length: + pax_headers[hname] = info[name] + + # Test number fields for values that exceed the field limit or values + # that like to be stored as float. + for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): + if name in pax_headers: + # The pax header has priority. Avoid overflow. + info[name] = 0 + continue + + val = info[name] + if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): + pax_headers[name] = str(val) + info[name] = 0 + + # Create a pax extended header if necessary. + if pax_headers: + buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) + else: + buf = b"" + + return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") + + @classmethod + def create_pax_global_header(cls, pax_headers): + """Return the object as a pax global header block sequence. + """ + return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") + + def _posix_split_name(self, name): + """Split a name longer than 100 chars into a prefix + and a name part. + """ + prefix = name[:LENGTH_PREFIX + 1] + while prefix and prefix[-1] != "/": + prefix = prefix[:-1] + + name = name[len(prefix):] + prefix = prefix[:-1] + + if not prefix or len(name) > LENGTH_NAME: + raise ValueError("name is too long") + return prefix, name + + @staticmethod + def _create_header(info, format, encoding, errors): + """Return a header block. info is a dictionary with file + information, format must be one of the *_FORMAT constants. 
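+           The chksum field is calculated over the packed block and
+           patched in afterwards.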
+ """ + parts = [ + stn(info.get("name", ""), 100, encoding, errors), + itn(info.get("mode", 0) & 0o7777, 8, format), + itn(info.get("uid", 0), 8, format), + itn(info.get("gid", 0), 8, format), + itn(info.get("size", 0), 12, format), + itn(info.get("mtime", 0), 12, format), + b" ", # checksum field + info.get("type", REGTYPE), + stn(info.get("linkname", ""), 100, encoding, errors), + info.get("magic", POSIX_MAGIC), + stn(info.get("uname", ""), 32, encoding, errors), + stn(info.get("gname", ""), 32, encoding, errors), + itn(info.get("devmajor", 0), 8, format), + itn(info.get("devminor", 0), 8, format), + stn(info.get("prefix", ""), 155, encoding, errors) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] + return buf + + @staticmethod + def _create_payload(payload): + """Return the string payload filled with zero bytes + up to the next 512 byte border. + """ + blocks, remainder = divmod(len(payload), BLOCKSIZE) + if remainder > 0: + payload += (BLOCKSIZE - remainder) * NUL + return payload + + @classmethod + def _create_gnu_long_header(cls, name, type, encoding, errors): + """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence + for name. + """ + name = name.encode(encoding, errors) + NUL + + info = {} + info["name"] = "././@LongLink" + info["type"] = type + info["size"] = len(name) + info["magic"] = GNU_MAGIC + + # create extended header + name blocks. + return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ + cls._create_payload(name) + + @classmethod + def _create_pax_generic_header(cls, pax_headers, type, encoding): + """Return a POSIX.1-2008 extended or global header sequence + that contains a list of keyword, value pairs. The values + must be strings. + """ + # Check if one of the fields contains surrogate characters and thereby + # forces hdrcharset=BINARY, see _proc_pax() for more information. + binary = False + for keyword, value in pax_headers.items(): + try: + value.encode("utf8", "strict") + except UnicodeEncodeError: + binary = True + break + + records = b"" + if binary: + # Put the hdrcharset field at the beginning of the header. + records += b"21 hdrcharset=BINARY\n" + + for keyword, value in pax_headers.items(): + keyword = keyword.encode("utf8") + if binary: + # Try to restore the original byte representation of `value'. + # Needless to say, that the encoding must match the string. + value = value.encode(encoding, "surrogateescape") + else: + value = value.encode("utf8") + + l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' + n = p = 0 + while True: + n = l + len(str(p)) + if n == p: + break + p = n + records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" + + # We use a hardcoded "././@PaxHeader" name like star does + # instead of the one that POSIX recommends. + info = {} + info["name"] = "././@PaxHeader" + info["type"] = type + info["size"] = len(records) + info["magic"] = POSIX_MAGIC + + # Create pax header + record blocks. + return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ + cls._create_payload(records) + + @classmethod + def frombuf(cls, buf, encoding, errors): + """Construct a TarInfo object from a 512 byte bytes object. 
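+           A HeaderError subclass is raised if the buffer is empty,
+           truncated, all zeroes or fails the checksum test.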
+        """
+        if len(buf) == 0:
+            raise EmptyHeaderError("empty header")
+        if len(buf) != BLOCKSIZE:
+            raise TruncatedHeaderError("truncated header")
+        if buf.count(NUL) == BLOCKSIZE:
+            raise EOFHeaderError("end of file header")
+
+        chksum = nti(buf[148:156])
+        if chksum not in calc_chksums(buf):
+            raise InvalidHeaderError("bad checksum")
+
+        obj = cls()
+        obj.name = nts(buf[0:100], encoding, errors)
+        obj.mode = nti(buf[100:108])
+        obj.uid = nti(buf[108:116])
+        obj.gid = nti(buf[116:124])
+        obj.size = nti(buf[124:136])
+        obj.mtime = nti(buf[136:148])
+        obj.chksum = chksum
+        obj.type = buf[156:157]
+        obj.linkname = nts(buf[157:257], encoding, errors)
+        obj.uname = nts(buf[265:297], encoding, errors)
+        obj.gname = nts(buf[297:329], encoding, errors)
+        obj.devmajor = nti(buf[329:337])
+        obj.devminor = nti(buf[337:345])
+        prefix = nts(buf[345:500], encoding, errors)
+
+        # Old V7 tar format represents a directory as a regular
+        # file with a trailing slash.
+        if obj.type == AREGTYPE and obj.name.endswith("/"):
+            obj.type = DIRTYPE
+
+        # The old GNU sparse format occupies some of the unused
+        # space in the buffer for up to 4 sparse structures.
+        # Save them for later processing in _proc_sparse().
+        if obj.type == GNUTYPE_SPARSE:
+            pos = 386
+            structs = []
+            for i in range(4):
+                try:
+                    offset = nti(buf[pos:pos + 12])
+                    numbytes = nti(buf[pos + 12:pos + 24])
+                except ValueError:
+                    break
+                structs.append((offset, numbytes))
+                pos += 24
+            isextended = bool(buf[482])
+            origsize = nti(buf[483:495])
+            obj._sparse_structs = (structs, isextended, origsize)
+
+        # Remove redundant slashes from directories.
+        if obj.isdir():
+            obj.name = obj.name.rstrip("/")
+
+        # Reconstruct a ustar longname.
+        if prefix and obj.type not in GNU_TYPES:
+            obj.name = prefix + "/" + obj.name
+        return obj
+
+    @classmethod
+    def fromtarfile(cls, tarfile):
+        """Return the next TarInfo object from TarFile object
+           tarfile.
+        """
+        buf = tarfile.fileobj.read(BLOCKSIZE)
+        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+        return obj._proc_member(tarfile)
+
+    #--------------------------------------------------------------------------
+    # The following are methods that are called depending on the type of a
+    # member. The entry point is _proc_member() which can be overridden in a
+    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header will
+    #    begin.
+    # 3. Return self or another valid TarInfo object.
+    def _proc_member(self, tarfile):
+        """Choose the right processing method depending on
+           the type and call it.
+        """
+        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+            return self._proc_gnulong(tarfile)
+        elif self.type == GNUTYPE_SPARSE:
+            return self._proc_sparse(tarfile)
+        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+            return self._proc_pax(tarfile)
+        else:
+            return self._proc_builtin(tarfile)
+
+    def _proc_builtin(self, tarfile):
+        """Process a builtin type or an unknown type which
+           will be treated as a regular file.
+        """
+        self.offset_data = tarfile.fileobj.tell()
+        offset = self.offset_data
+        if self.isreg() or self.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            offset += self._block(self.size)
+        tarfile.offset = offset
+
+        # Patch the TarInfo object with saved global
+        # header information.
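+        # (i.e. pax_headers collected from earlier pax global headers).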
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) + + return self + + def _proc_gnulong(self, tarfile): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = tarfile.fileobj.read(self._block(self.size)) + + # Fetch the next header and process it. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = self.offset + if self.type == GNUTYPE_LONGNAME: + next.name = nts(buf, tarfile.encoding, tarfile.errors) + elif self.type == GNUTYPE_LONGLINK: + next.linkname = nts(buf, tarfile.encoding, tarfile.errors) + + return next + + def _proc_sparse(self, tarfile): + """Process a GNU sparse header plus extra headers. + """ + # We already collected some sparse structures in frombuf(). + structs, isextended, origsize = self._sparse_structs + del self._sparse_structs + + # Collect sparse structures from extended header blocks. + while isextended: + buf = tarfile.fileobj.read(BLOCKSIZE) + pos = 0 + for i in range(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset and numbytes: + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[504]) + self.sparse = structs + + self.offset_data = tarfile.fileobj.tell() + tarfile.offset = self.offset_data + self._block(self.size) + self.size = origsize + return self + + def _proc_pax(self, tarfile): + """Process an extended or global header as described in + POSIX.1-2008. + """ + # Read the header information. + buf = tarfile.fileobj.read(self._block(self.size)) + + # A pax header stores supplemental information for either + # the following file (extended) or all following files + # (global). + if self.type == XGLTYPE: + pax_headers = tarfile.pax_headers + else: + pax_headers = tarfile.pax_headers.copy() + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. + match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) + if match is not None: + pax_headers["hdrcharset"] = match.group(1).decode("utf8") + + # For the time being, we don't care about anything other than "BINARY". + # The only other value that is currently allowed by the standard is + # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + hdrcharset = pax_headers.get("hdrcharset") + if hdrcharset == "BINARY": + encoding = tarfile.encoding + else: + encoding = "utf8" + + # Parse pax header information. A record looks like that: + # "%d %s=%s\n" % (length, keyword, value). length is the size + # of the complete record including the length field itself and + # the newline. keyword and value are both UTF-8 encoded strings. + regex = re.compile(br"(\d+) ([^=]+)=") + pos = 0 + while True: + match = regex.match(buf, pos) + if not match: + break + + length, keyword = match.groups() + length = int(length) + value = buf[match.end(2) + 1:match.start(1) + length - 1] + + # Normally, we could just use "utf8" as the encoding and "strict" + # as the error handler, but we better not take the risk. 
For + # example, GNU tar <= 1.23 is known to store filenames it cannot + # translate to UTF-8 as raw strings (unfortunately without a + # hdrcharset=BINARY header). + # We first try the strict standard encoding, and if that fails we + # fall back on the user's encoding and error handler. + keyword = self._decode_pax_field(keyword, "utf8", "utf8", + tarfile.errors) + if keyword in PAX_NAME_FIELDS: + value = self._decode_pax_field(value, encoding, tarfile.encoding, + tarfile.errors) + else: + value = self._decode_pax_field(value, "utf8", "utf8", + tarfile.errors) + + pax_headers[keyword] = value + pos += length + + # Fetch the next header. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Process GNU sparse information. + if "GNU.sparse.map" in pax_headers: + # GNU extended sparse format version 0.1. + self._proc_gnusparse_01(next, pax_headers) + + elif "GNU.sparse.size" in pax_headers: + # GNU extended sparse format version 0.0. + self._proc_gnusparse_00(next, pax_headers, buf) + + elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": + # GNU extended sparse format version 1.0. + self._proc_gnusparse_10(next, pax_headers, tarfile) + + if self.type in (XHDTYPE, SOLARIS_XHDTYPE): + # Patch the TarInfo object with the extended header info. + next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) + next.offset = self.offset + + if "size" in pax_headers: + # If the extended header replaces the size field, + # we need to recalculate the offset where the next + # header starts. + offset = next.offset_data + if next.isreg() or next.type not in SUPPORTED_TYPES: + offset += next._block(next.size) + tarfile.offset = offset + + return next + + def _proc_gnusparse_00(self, next, pax_headers, buf): + """Process a GNU tar extended sparse header, version 0.0. + """ + offsets = [] + for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): + offsets.append(int(match.group(1))) + numbytes = [] + for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): + numbytes.append(int(match.group(1))) + next.sparse = list(zip(offsets, numbytes)) + + def _proc_gnusparse_01(self, next, pax_headers): + """Process a GNU tar extended sparse header, version 0.1. + """ + sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _proc_gnusparse_10(self, next, pax_headers, tarfile): + """Process a GNU tar extended sparse header, version 1.0. + """ + fields = None + sparse = [] + buf = tarfile.fileobj.read(BLOCKSIZE) + fields, buf = buf.split(b"\n", 1) + fields = int(fields) + while len(sparse) < fields * 2: + if b"\n" not in buf: + buf += tarfile.fileobj.read(BLOCKSIZE) + number, buf = buf.split(b"\n", 1) + sparse.append(int(number)) + next.offset_data = tarfile.fileobj.tell() + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _apply_pax_info(self, pax_headers, encoding, errors): + """Replace fields with supplemental information from a previous + pax extended or global header. 
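+           Numeric values that cannot be parsed are replaced with 0.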
+ """ + for keyword, value in pax_headers.items(): + if keyword == "GNU.sparse.name": + setattr(self, "path", value) + elif keyword == "GNU.sparse.size": + setattr(self, "size", int(value)) + elif keyword == "GNU.sparse.realsize": + setattr(self, "size", int(value)) + elif keyword in PAX_FIELDS: + if keyword in PAX_NUMBER_FIELDS: + try: + value = PAX_NUMBER_FIELDS[keyword](value) + except ValueError: + value = 0 + if keyword == "path": + value = value.rstrip("/") + setattr(self, keyword, value) + + self.pax_headers = pax_headers.copy() + + def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): + """Decode a single field from a pax record. + """ + try: + return value.decode(encoding, "strict") + except UnicodeDecodeError: + return value.decode(fallback_encoding, fallback_errors) + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. + """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def isreg(self): + return self.type in REGULAR_TYPES + def isfile(self): + return self.isreg() + def isdir(self): + return self.type == DIRTYPE + def issym(self): + return self.type == SYMTYPE + def islnk(self): + return self.type == LNKTYPE + def ischr(self): + return self.type == CHRTYPE + def isblk(self): + return self.type == BLKTYPE + def isfifo(self): + return self.type == FIFOTYPE + def issparse(self): + return self.sparse is not None + def isdev(self): + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 1 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). If > 0, errors + # are passed to the caller as exceptions. + + format = DEFAULT_FORMAT # The format to use when creating an archive. + + encoding = ENCODING # Encoding for 8-bit character strings. + + errors = None # Error handler for unicode conversion. + + tarinfo = TarInfo # The default TarInfo class to use. + + fileobject = ExFileObject # The default ExFileObject class to use. + + def __init__(self, name=None, mode="r", fileobj=None, format=None, + tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, + errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + self.mode = mode + self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] + + if not fileobj: + if self.mode == "a" and not os.path.exists(name): + # Create nonexistent files in append mode. 
+ self.mode = "w" + self._mode = "wb" + fileobj = bltn_open(name, self._mode) + self._extfileobj = False + else: + if name is None and hasattr(fileobj, "name"): + name = fileobj.name + if hasattr(fileobj, "mode"): + self._mode = fileobj.mode + self._extfileobj = True + self.name = os.path.abspath(name) if name else None + self.fileobj = fileobj + + # Init attributes. + if format is not None: + self.format = format + if tarinfo is not None: + self.tarinfo = tarinfo + if dereference is not None: + self.dereference = dereference + if ignore_zeros is not None: + self.ignore_zeros = ignore_zeros + if encoding is not None: + self.encoding = encoding + self.errors = errors + + if pax_headers is not None and self.format == PAX_FORMAT: + self.pax_headers = pax_headers + else: + self.pax_headers = {} + + if debug is not None: + self.debug = debug + if errorlevel is not None: + self.errorlevel = errorlevel + + # Init datastructures. + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = self.fileobj.tell() + # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + try: + if self.mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self.mode == "a": + # Move to the end of the archive, + # before the first empty block. + while True: + self.fileobj.seek(self.offset) + try: + tarinfo = self.tarinfo.fromtarfile(self) + self.members.append(tarinfo) + except EOFHeaderError: + self.fileobj.seek(self.offset) + break + except HeaderError as e: + raise ReadError(str(e)) + + if self.mode in "aw": + self._loaded = True + + if self.pax_headers: + buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) + self.fileobj.write(buf) + self.offset += len(buf) + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): + """Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. + for comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj, **kwargs) + except (ReadError, CompressionError) as e: + if fileobj is not None: + fileobj.seek(saved_pos) + continue + raise ReadError("file could not be opened successfully") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. + if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj, **kwargs) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + stream = _Stream(name, filemode, comptype, fileobj, bufsize) + try: + t = cls(name, filemode, stream, **kwargs) + except: + stream.close() + raise + t._extfileobj = False + return t + + elif mode in "aw": + return cls.taropen(name, mode, fileobj, **kwargs) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None, **kwargs): + """Open uncompressed tar archive name for reading or writing. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + return cls(name, mode, fileobj, **kwargs) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + extfileobj = fileobj is not None + try: + fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) + t = cls.taropen(name, mode, fileobj, **kwargs) + except IOError: + if not extfileobj and fileobj is not None: + fileobj.close() + if fileobj is None: + raise + raise ReadError("not a gzip file") + except: + if not extfileobj and fileobj is not None: + fileobj.close() + raise + t._extfileobj = extfileobj + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open bzip2 compressed tar archive name for reading or writing. 
+ Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'.") + + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + + if fileobj is not None: + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (IOError, EOFError): + fileobj.close() + raise ReadError("not a bzip2 file") + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open" # bzip2 compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. + """ + if self.closed: + return + + if self.mode in "aw": + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + + if not self._extfileobj: + self.fileobj.close() + self.closed = True + + def getmember(self, name): + """Return a TarInfo object for member `name'. If `name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. + """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object for either the file `name' or the file + object `fileobj' (using os.fstat on its file descriptor). You can + modify some of the TarInfo's attributes before you add it using + addfile(). If given, `arcname' specifies an alternative name for the + file in the archive. + """ + self._check("aw") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + drv, arcname = os.path.splitdrive(arcname) + arcname = arcname.replace(os.sep, "/") + arcname = arcname.lstrip("/") + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = self.tarinfo() + tarinfo.tarfile = self + + # Use os.stat or os.lstat, depending on platform + # and if symlinks shall be resolved. 
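+        # (os.lstat() examines the symlink itself, os.stat() follows it.)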
+ if fileobj is None: + if hasattr(os, "lstat") and not self.dereference: + statres = os.lstat(name) + else: + statres = os.stat(name) + else: + statres = os.fstat(fileobj.fileno()) + linkname = "" + + stmd = statres.st_mode + if stat.S_ISREG(stmd): + inode = (statres.st_ino, statres.st_dev) + if not self.dereference and statres.st_nlink > 1 and \ + inode in self.inodes and arcname != self.inodes[inode]: + # Is it a hardlink to an already + # archived file? + type = LNKTYPE + linkname = self.inodes[inode] + else: + # The inode is added only if its valid. + # For win32 it is always 0. + type = REGTYPE + if inode[0]: + self.inodes[inode] = arcname + elif stat.S_ISDIR(stmd): + type = DIRTYPE + elif stat.S_ISFIFO(stmd): + type = FIFOTYPE + elif stat.S_ISLNK(stmd): + type = SYMTYPE + linkname = os.readlink(name) + elif stat.S_ISCHR(stmd): + type = CHRTYPE + elif stat.S_ISBLK(stmd): + type = BLKTYPE + else: + return None + + # Fill the TarInfo object with all + # information we can get. + tarinfo.name = arcname + tarinfo.mode = stmd + tarinfo.uid = statres.st_uid + tarinfo.gid = statres.st_gid + if type == REGTYPE: + tarinfo.size = statres.st_size + else: + tarinfo.size = 0 + tarinfo.mtime = statres.st_mtime + tarinfo.type = type + tarinfo.linkname = linkname + if pwd: + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + if grp: + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass + + if type in (CHRTYPE, BLKTYPE): + if hasattr(os, "major") and hasattr(os, "minor"): + tarinfo.devmajor = os.major(statres.st_rdev) + tarinfo.devminor = os.minor(statres.st_rdev) + return tarinfo + + def list(self, verbose=True): + """Print a table of contents to sys.stdout. If `verbose' is False, only + the names of the members are printed. If it is True, an `ls -l'-like + output is produced. + """ + self._check() + + for tarinfo in self: + if verbose: + print(filemode(tarinfo.mode), end=' ') + print("%s/%s" % (tarinfo.uname or tarinfo.uid, + tarinfo.gname or tarinfo.gid), end=' ') + if tarinfo.ischr() or tarinfo.isblk(): + print("%10s" % ("%d,%d" \ + % (tarinfo.devmajor, tarinfo.devminor)), end=' ') + else: + print("%10d" % tarinfo.size, end=' ') + print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6], end=' ') + + print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') + + if verbose: + if tarinfo.issym(): + print("->", tarinfo.linkname, end=' ') + if tarinfo.islnk(): + print("link to", tarinfo.linkname, end=' ') + print() + + def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): + """Add the file `name' to the archive. `name' may be any type of file + (directory, fifo, symbolic link, etc.). If given, `arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. This can be avoided by + setting `recursive' to False. `exclude' is a function that should + return True for each filename to be excluded. `filter' is a function + that expects a TarInfo object argument and returns the changed + TarInfo object, if it returns None the TarInfo object will be + excluded from the archive. + """ + self._check("aw") + + if arcname is None: + arcname = name + + # Exclude pathnames. + if exclude is not None: + import warnings + warnings.warn("use the filter argument instead", + DeprecationWarning, 2) + if exclude(name): + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Skip if somebody tries to archive the archive... 
+ if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. + tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Change or exclude the TarInfo object. + if filter is not None: + tarinfo = filter(tarinfo) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + f = bltn_open(name, "rb") + self.addfile(tarinfo, f) + f.close() + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in os.listdir(name): + self.add(os.path.join(name, f), os.path.join(arcname, f), + recursive, exclude, filter=filter) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is + given, tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects using gettarinfo(). + On Windows platforms, `fileobj' should always be opened with mode + 'rb' to avoid irritation about the file size. + """ + self._check("aw") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.format, self.encoding, self.errors) + self.fileobj.write(buf) + self.offset += len(buf) + + # If there's data to follow, append it. + if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). + """ + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 0o700 + # Do not set_attrs directories, as we will do that further down + self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) + + # Reverse sort directories. + directories.sort(key=lambda a: a.name) + directories.reverse() + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extract(self, member, path="", set_attrs=True): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a TarInfo object. You can + specify a different directory using `path'. File attributes (owner, + mtime, mode) are set unless `set_attrs' is False. + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + # Prepare the link target for makelink(). 
+ if tarinfo.islnk(): + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name), + set_attrs=set_attrs) + except EnvironmentError as e: + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extractfile(self, member): + """Extract a member from the archive as a file object. `member' may be + a filename or a TarInfo object. If `member' is a regular file, a + file-like object is returned. If `member' is a link, a file-like + object is constructed from the link's target. If `member' is none of + the above, None is returned. + The file-like object is read-only and provides the following + methods: read(), readline(), readlines(), seek() and tell() + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + if tarinfo.isreg(): + return self.fileobject(self, tarinfo) + + elif tarinfo.type not in SUPPORTED_TYPES: + # If a member's type is unknown, it is treated as a + # regular file. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. + raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._find_link_target(tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath, set_attrs=True): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + targetpath = targetpath.rstrip("/") + targetpath = targetpath.replace("/", os.sep) + + # Create all upper directories. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + # Create directories that are not part of the archive with + # default permissions. + os.makedirs(upperdirs) + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + if set_attrs: + self.chown(tarinfo, targetpath) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. 
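+    # As a minimal illustration, a hypothetical subclass could refuse to
+    # create device nodes by overriding makedev():
+    #
+    #     class NoDevTarFile(TarFile):
+    #         def makedev(self, tarinfo, targetpath):
+    #             raise ExtractError("device nodes are skipped")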
+ + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) + except EnvironmentError as e: + if e.errno != errno.EEXIST: + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.fileobj + source.seek(tarinfo.offset_data) + target = bltn_open(targetpath, "wb") + if tarinfo.sparse is not None: + for offset, size in tarinfo.sparse: + target.seek(offset) + copyfileobj(source, target, size) + else: + copyfileobj(source, target, tarinfo.size) + target.seek(tarinfo.size) + target.truncate() + target.close() + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. + """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + try: + # For systems that support symbolic and hard links. + if tarinfo.issym(): + os.symlink(tarinfo.linkname, targetpath) + else: + # See extract(). + if os.path.exists(tarinfo._link_target): + os.link(tarinfo._link_target, targetpath) + else: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except symlink_exception: + if tarinfo.issym(): + linkpath = os.path.join(os.path.dirname(tarinfo.name), + tarinfo.linkname) + else: + linkpath = tarinfo.linkname + else: + try: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except KeyError: + raise ExtractError("unable to resolve link inside archive") + + def chown(self, tarinfo, targetpath): + """Set owner of targetpath according to tarinfo. + """ + if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. + try: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + g = tarinfo.gid + try: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + u = tarinfo.uid + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + if sys.platform != "os2emx": + os.chown(targetpath, u, g) + except EnvironmentError as e: + raise ExtractError("could not change owner") + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if hasattr(os, 'chmod'): + try: + os.chmod(targetpath, tarinfo.mode) + except EnvironmentError as e: + raise ExtractError("could not change mode") + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. 
+ """ + if not hasattr(os, 'utime'): + return + try: + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + except EnvironmentError as e: + raise ExtractError("could not change modification time") + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Read the next block. + self.fileobj.seek(self.offset) + tarinfo = None + while True: + try: + tarinfo = self.tarinfo.fromtarfile(self) + except EOFHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + except InvalidHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + elif self.offset == 0: + raise ReadError(str(e)) + except EmptyHeaderError: + if self.offset == 0: + raise ReadError("empty file") + except TruncatedHeaderError as e: + if self.offset == 0: + raise ReadError(str(e)) + except SubsequentHeaderError as e: + raise ReadError(str(e)) + break + + if tarinfo is not None: + self.members.append(tarinfo) + else: + self._loaded = True + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _getmember(self, name, tarinfo=None, normalize=False): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + # Limit the member search list up to tarinfo. + if tarinfo is not None: + members = members[:members.index(tarinfo)] + + if normalize: + name = os.path.normpath(name) + + for member in reversed(members): + if normalize: + member_name = os.path.normpath(member.name) + else: + member_name = member.name + + if name == member_name: + return member + + def _load(self): + """Read through the entire archive file and look for readable + members. + """ + while True: + tarinfo = self.next() + if tarinfo is None: + break + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. + """ + if self.closed: + raise IOError("%s is closed" % self.__class__.__name__) + if mode is not None and self.mode not in mode: + raise IOError("bad operation for mode %r" % self.mode) + + def _find_link_target(self, tarinfo): + """Find the target member of a symlink or hardlink member in the + archive. + """ + if tarinfo.issym(): + # Always search the entire archive. + linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname + limit = None + else: + # Search the archive before the link, because a hard link is + # just a reference to an already archived file. + linkname = tarinfo.linkname + limit = tarinfo + + member = self._getmember(linkname, tarinfo=limit, normalize=True) + if member is None: + raise KeyError("linkname %r not found" % linkname) + return member + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + return iter(self.members) + else: + return TarIter(self) + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. 
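+           Messages are emitted only when level <= self.debug.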
+ """ + if level <= self.debug: + print(msg, file=sys.stderr) + + def __enter__(self): + self._check() + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.close() + else: + # An exception occurred. We must not call close() because + # it would try to write end-of-archive blocks and padding. + if not self._extfileobj: + self.fileobj.close() + self.closed = True +# class TarFile + +class TarIter(object): + """Iterator Class. + + for tarinfo in TarFile(...): + suite... + """ + + def __init__(self, tarfile): + """Construct a TarIter object. + """ + self.tarfile = tarfile + self.index = 0 + def __iter__(self): + """Return iterator object. + """ + return self + + def __next__(self): + """Return the next item using TarFile's next() method. + When all members have been read, set TarFile as _loaded. + """ + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will cause TarIter to stop prematurely. + if not self.tarfile._loaded: + tarinfo = self.tarfile.next() + if not tarinfo: + self.tarfile._loaded = True + raise StopIteration + else: + try: + tarinfo = self.tarfile.members[self.index] + except IndexError: + raise StopIteration + self.index += 1 + return tarinfo + + next = __next__ # for Python 2.x + +#-------------------- +# exported functions +#-------------------- +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. + """ + try: + t = open(name) + t.close() + return True + except TarError: + return False + +bltn_open = open +open = TarFile.open diff --git a/pipenv/patched/notpip/distro.LICENSE b/pipenv/patched/notpip/_vendor/distro/LICENSE similarity index 100% rename from pipenv/patched/notpip/distro.LICENSE rename to pipenv/patched/notpip/_vendor/distro/LICENSE diff --git a/pipenv/patched/notpip/_vendor/distro/__init__.py b/pipenv/patched/notpip/_vendor/distro/__init__.py new file mode 100644 index 0000000000..7686fe85a7 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distro/__init__.py @@ -0,0 +1,54 @@ +from .distro import ( + NORMALIZED_DISTRO_ID, + NORMALIZED_LSB_ID, + NORMALIZED_OS_ID, + LinuxDistribution, + __version__, + build_number, + codename, + distro_release_attr, + distro_release_info, + id, + info, + like, + linux_distribution, + lsb_release_attr, + lsb_release_info, + major_version, + minor_version, + name, + os_release_attr, + os_release_info, + uname_attr, + uname_info, + version, + version_parts, +) + +__all__ = [ + "NORMALIZED_DISTRO_ID", + "NORMALIZED_LSB_ID", + "NORMALIZED_OS_ID", + "LinuxDistribution", + "build_number", + "codename", + "distro_release_attr", + "distro_release_info", + "id", + "info", + "like", + "linux_distribution", + "lsb_release_attr", + "lsb_release_info", + "major_version", + "minor_version", + "name", + "os_release_attr", + "os_release_info", + "uname_attr", + "uname_info", + "version", + "version_parts", +] + +__version__ = __version__ diff --git a/pipenv/patched/notpip/_vendor/distro/__main__.py b/pipenv/patched/notpip/_vendor/distro/__main__.py new file mode 100644 index 0000000000..0c01d5b08b --- /dev/null +++ b/pipenv/patched/notpip/_vendor/distro/__main__.py @@ -0,0 +1,4 @@ +from .distro import main + +if __name__ == "__main__": + main() diff --git a/pipenv/patched/notpip/_vendor/distro.py b/pipenv/patched/notpip/_vendor/distro/distro.py similarity index 83% rename from pipenv/patched/notpip/_vendor/distro.py rename to pipenv/patched/notpip/_vendor/distro/distro.py index 
7892741347..49066ae836 100644 --- a/pipenv/patched/notpip/_vendor/distro.py +++ b/pipenv/patched/notpip/_vendor/distro/distro.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python # Copyright 2015,2016,2017 Nir Cohen # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -36,40 +37,39 @@ import subprocess import sys import warnings +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, +) -__version__ = "1.6.0" - -# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2 -# support, can use typing.TYPE_CHECKING instead. See: -# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING -if False: # pragma: nocover - from typing import ( - Any, - Callable, - Dict, - Iterable, - Optional, - Sequence, - TextIO, - Tuple, - Type, - TypedDict, - Union, - ) +try: + from typing import TypedDict +except ImportError: + # Python 3.7 + TypedDict = dict - VersionDict = TypedDict( - "VersionDict", {"major": str, "minor": str, "build_number": str} - ) - InfoDict = TypedDict( - "InfoDict", - { - "id": str, - "version": str, - "version_parts": VersionDict, - "like": str, - "codename": str, - }, - ) +__version__ = "1.7.0" + + +class VersionDict(TypedDict): + major: str + minor: str + build_number: str + + +class InfoDict(TypedDict): + id: str + version: str + version_parts: VersionDict + like: str + codename: str _UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") @@ -85,6 +85,7 @@ #: * Value: Normalized value. NORMALIZED_OS_ID = { "ol": "oracle", # Oracle Linux + "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap } #: Translation table for normalizing the "Distributor ID" attribute returned by @@ -133,8 +134,7 @@ ) -def linux_distribution(full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] +def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: """ .. deprecated:: 1.6.0 @@ -151,7 +151,8 @@ def linux_distribution(full_distribution_name=True): * ``version``: The result of :func:`distro.version`. - * ``codename``: The result of :func:`distro.codename`. + * ``codename``: The extra item (usually in parentheses) after the + os-release version number, or the result of :func:`distro.codename`. The interface of this function is compatible with the original :py:func:`platform.linux_distribution` function, supporting a subset of @@ -176,8 +177,7 @@ def linux_distribution(full_distribution_name=True): return _distro.linux_distribution(full_distribution_name) -def id(): - # type: () -> str +def id() -> str: """ Return the distro ID of the current distribution, as a machine-readable string. @@ -198,7 +198,7 @@ def id(): "fedora" Fedora "sles" SUSE Linux Enterprise Server "opensuse" openSUSE - "amazon" Amazon Linux + "amzn" Amazon Linux "arch" Arch Linux "cloudlinux" CloudLinux OS "exherbo" Exherbo Linux @@ -219,6 +219,8 @@ def id(): "netbsd" NetBSD "freebsd" FreeBSD "midnightbsd" MidnightBSD + "rocky" Rocky Linux + "aix" AIX ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -256,8 +258,7 @@ def id(): return _distro.id() -def name(pretty=False): - # type: (bool) -> str +def name(pretty: bool = False) -> str: """ Return the name of the current OS distribution, as a human-readable string. 
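A minimal usage sketch of the typed surface above (the import path assumes
pipenv's vendored copy; the standalone PyPI distribution is imported as
plain `distro`):

    from pipenv.patched.notpip._vendor import distro

    info = distro.info(best=True)   # typed as InfoDict after this change
    print(info["id"], info["version_parts"]["major"])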
@@ -296,8 +297,7 @@ def name(pretty=False): return _distro.name(pretty) -def version(pretty=False, best=False): - # type: (bool, bool) -> str +def version(pretty: bool = False, best: bool = False) -> str: """ Return the version of the current OS distribution, as a human-readable string. @@ -313,6 +313,10 @@ def version(pretty=False, best=False): sources in a fixed priority order does not always yield the most precise version (e.g. for Debian 8.2, or CentOS 7.1). + Some other distributions may not provide this kind of information. In these + cases, an empty string would be returned. This behavior can be observed + with rolling releases distributions (e.g. Arch Linux). + The *best* parameter can be used to control the approach for the returned version: @@ -341,8 +345,7 @@ def version(pretty=False, best=False): return _distro.version(pretty, best) -def version_parts(best=False): - # type: (bool) -> Tuple[str, str, str] +def version_parts(best: bool = False) -> Tuple[str, str, str]: """ Return the version of the current OS distribution as a tuple ``(major, minor, build_number)`` with items as follows: @@ -359,8 +362,7 @@ def version_parts(best=False): return _distro.version_parts(best) -def major_version(best=False): - # type: (bool) -> str +def major_version(best: bool = False) -> str: """ Return the major version of the current OS distribution, as a string, if provided. @@ -373,8 +375,7 @@ def major_version(best=False): return _distro.major_version(best) -def minor_version(best=False): - # type: (bool) -> str +def minor_version(best: bool = False) -> str: """ Return the minor version of the current OS distribution, as a string, if provided. @@ -387,8 +388,7 @@ def minor_version(best=False): return _distro.minor_version(best) -def build_number(best=False): - # type: (bool) -> str +def build_number(best: bool = False) -> str: """ Return the build number of the current OS distribution, as a string, if provided. @@ -401,8 +401,7 @@ def build_number(best=False): return _distro.build_number(best) -def like(): - # type: () -> str +def like() -> str: """ Return a space-separated list of distro IDs of distributions that are closely related to the current OS distribution in regards to packaging @@ -419,8 +418,7 @@ def like(): return _distro.like() -def codename(): - # type: () -> str +def codename() -> str: """ Return the codename for the release of the current OS distribution, as a string. @@ -444,8 +442,7 @@ def codename(): return _distro.codename() -def info(pretty=False, best=False): - # type: (bool, bool) -> InfoDict +def info(pretty: bool = False, best: bool = False) -> InfoDict: """ Return certain machine-readable information items about the current OS distribution in a dictionary, as shown in the following example: @@ -489,8 +486,7 @@ def info(pretty=False, best=False): return _distro.info(pretty, best) -def os_release_info(): - # type: () -> Dict[str, str] +def os_release_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the os-release file data source of the current OS distribution. @@ -500,8 +496,7 @@ def os_release_info(): return _distro.os_release_info() -def lsb_release_info(): - # type: () -> Dict[str, str] +def lsb_release_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the current OS distribution. 
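

The run of hunks above is mechanical: each "# type:" comment becomes an inline annotation with no change in behavior. The one substantive edit is the expanded version() docstring, which now states that rolling-release distributions may yield an empty string. A hedged usage sketch (the vendored import path is taken from this patch's file headers; the sample outputs come from the docstring's own Debian 8.2 example):

    from pipenv.patched.notpip._vendor import distro

    print(distro.version())           # e.g. "8" on Debian 8.2 (fixed priority order)
    print(distro.version(best=True))  # e.g. "8.2": most precise value across sources
    # On rolling releases such as Arch Linux, version() may simply return "".
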
@@ -512,8 +507,7 @@ def lsb_release_info(): return _distro.lsb_release_info() -def distro_release_info(): - # type: () -> Dict[str, str] +def distro_release_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the distro release file data source of the current OS distribution. @@ -523,8 +517,7 @@ def distro_release_info(): return _distro.distro_release_info() -def uname_info(): - # type: () -> Dict[str, str] +def uname_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the distro release file data source of the current OS distribution. @@ -532,8 +525,7 @@ def uname_info(): return _distro.uname_info() -def os_release_attr(attribute): - # type: (str) -> str +def os_release_attr(attribute: str) -> str: """ Return a single named information item from the os-release file data source of the current OS distribution. @@ -552,8 +544,7 @@ def os_release_attr(attribute): return _distro.os_release_attr(attribute) -def lsb_release_attr(attribute): - # type: (str) -> str +def lsb_release_attr(attribute: str) -> str: """ Return a single named information item from the lsb_release command output data source of the current OS distribution. @@ -573,8 +564,7 @@ def lsb_release_attr(attribute): return _distro.lsb_release_attr(attribute) -def distro_release_attr(attribute): - # type: (str) -> str +def distro_release_attr(attribute: str) -> str: """ Return a single named information item from the distro release file data source of the current OS distribution. @@ -593,8 +583,7 @@ def distro_release_attr(attribute): return _distro.distro_release_attr(attribute) -def uname_attr(attribute): - # type: (str) -> str +def uname_attr(attribute: str) -> str: """ Return a single named information item from the distro release file data source of the current OS distribution. @@ -615,25 +604,23 @@ def uname_attr(attribute): from functools import cached_property except ImportError: # Python < 3.8 - class cached_property(object): # type: ignore + class cached_property: # type: ignore """A version of @property which caches the value. On access, it calls the underlying function and sets the value in `__dict__` so future accesses will not re-call the property. """ - def __init__(self, f): - # type: (Callable[[Any], Any]) -> None + def __init__(self, f: Callable[[Any], Any]) -> None: self._fname = f.__name__ self._f = f - def __get__(self, obj, owner): - # type: (Any, Type[Any]) -> Any - assert obj is not None, "call {} on an instance".format(self._fname) + def __get__(self, obj: Any, owner: Type[Any]) -> Any: + assert obj is not None, f"call {self._fname} on an instance" ret = obj.__dict__[self._fname] = self._f(obj) return ret -class LinuxDistribution(object): +class LinuxDistribution: """ Provides information about a OS distribution. @@ -653,13 +640,13 @@ class LinuxDistribution(object): def __init__( self, - include_lsb=True, - os_release_file="", - distro_release_file="", - include_uname=True, - root_dir=None, - ): - # type: (bool, str, str, bool, Optional[str]) -> None + include_lsb: Optional[bool] = None, + os_release_file: str = "", + distro_release_file: str = "", + include_uname: Optional[bool] = None, + root_dir: Optional[str] = None, + include_oslevel: Optional[bool] = None, + ) -> None: """ The initialization method of this class gathers information from the available data sources, and stores that in private instance attributes. @@ -699,7 +686,13 @@ def __init__( be empty. 
* ``root_dir`` (string): The absolute path to the root directory to use - to find distro-related information files. + to find distro-related information files. Note that ``include_*`` + parameters must not be enabled in combination with ``root_dir``. + + * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command + output is included as a data source. If the oslevel command is not + available in the program execution path the data source will be + empty. Public instance attributes: @@ -718,14 +711,21 @@ def __init__( parameter. This controls whether the uname information will be loaded. + * ``include_oslevel`` (bool): The result of the ``include_oslevel`` + parameter. This controls whether (AIX) oslevel information will be + loaded. + + * ``root_dir`` (string): The result of the ``root_dir`` parameter. + The absolute path to the root directory to use to find distro-related + information files. + Raises: - * :py:exc:`IOError`: Some I/O issue with an os-release file or distro - release file. + * :py:exc:`ValueError`: Initialization parameters combination is not + supported. - * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had - some issue (other than not being available in the program execution - path). + * :py:exc:`OSError`: Some I/O issue with an os-release file or distro + release file. * :py:exc:`UnicodeError`: A data source has unexpected characters or uses an unexpected encoding. @@ -754,11 +754,24 @@ def __init__( self.os_release_file = usr_lib_os_release_file self.distro_release_file = distro_release_file or "" # updated later - self.include_lsb = include_lsb - self.include_uname = include_uname - def __repr__(self): - # type: () -> str + is_root_dir_defined = root_dir is not None + if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): + raise ValueError( + "Including subprocess data sources from specific root_dir is disallowed" + " to prevent false information" + ) + self.include_lsb = ( + include_lsb if include_lsb is not None else not is_root_dir_defined + ) + self.include_uname = ( + include_uname if include_uname is not None else not is_root_dir_defined + ) + self.include_oslevel = ( + include_oslevel if include_oslevel is not None else not is_root_dir_defined + ) + + def __repr__(self) -> str: """Return repr of all info""" return ( "LinuxDistribution(" @@ -766,14 +779,18 @@ def __repr__(self): "distro_release_file={self.distro_release_file!r}, " "include_lsb={self.include_lsb!r}, " "include_uname={self.include_uname!r}, " + "include_oslevel={self.include_oslevel!r}, " + "root_dir={self.root_dir!r}, " "_os_release_info={self._os_release_info!r}, " "_lsb_release_info={self._lsb_release_info!r}, " "_distro_release_info={self._distro_release_info!r}, " - "_uname_info={self._uname_info!r})".format(self=self) + "_uname_info={self._uname_info!r}, " + "_oslevel_info={self._oslevel_info!r})".format(self=self) ) - def linux_distribution(self, full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] + def linux_distribution( + self, full_distribution_name: bool = True + ) -> Tuple[str, str, str]: """ Return information about the OS distribution that is compatible with Python's :func:`platform.linux_distribution`, supporting a subset @@ -784,18 +801,16 @@ def linux_distribution(self, full_distribution_name=True): return ( self.name() if full_distribution_name else self.id(), self.version(), - self.codename(), + self._os_release_info.get("release_codename") or self.codename(), ) - def id(self): - # type: () -> str + def 
id(self) -> str: """Return the distro ID of the OS distribution, as a string. For details, see :func:`distro.id`. """ - def normalize(distro_id, table): - # type: (str, Dict[str, str]) -> str + def normalize(distro_id: str, table: Dict[str, str]) -> str: distro_id = distro_id.lower().replace(" ", "_") return table.get(distro_id, distro_id) @@ -817,8 +832,7 @@ def normalize(distro_id, table): return "" - def name(self, pretty=False): - # type: (bool) -> str + def name(self, pretty: bool = False) -> str: """ Return the name of the OS distribution, as a string. @@ -838,11 +852,10 @@ def name(self, pretty=False): name = self.distro_release_attr("name") or self.uname_attr("name") version = self.version(pretty=True) if version: - name = name + " " + version + name = f"{name} {version}" return name or "" - def version(self, pretty=False, best=False): - # type: (bool, bool) -> str + def version(self, pretty: bool = False, best: bool = False) -> str: """ Return the version of the OS distribution, as a string. @@ -860,6 +873,9 @@ def version(self, pretty=False, best=False): ).get("version_id", ""), self.uname_attr("release"), ] + if self.uname_attr("id").startswith("aix"): + # On AIX platforms, prefer oslevel command output. + versions.insert(0, self.oslevel_info()) version = "" if best: # This algorithm uses the last version in priority order that has @@ -875,11 +891,10 @@ def version(self, pretty=False, best=False): version = v break if pretty and version and self.codename(): - version = "{0} ({1})".format(version, self.codename()) + version = f"{version} ({self.codename()})" return version - def version_parts(self, best=False): - # type: (bool) -> Tuple[str, str, str] + def version_parts(self, best: bool = False) -> Tuple[str, str, str]: """ Return the version of the OS distribution, as a tuple of version numbers. @@ -895,8 +910,7 @@ def version_parts(self, best=False): return major, minor or "", build_number or "" return "", "", "" - def major_version(self, best=False): - # type: (bool) -> str + def major_version(self, best: bool = False) -> str: """ Return the major version number of the current distribution. @@ -904,8 +918,7 @@ def major_version(self, best=False): """ return self.version_parts(best)[0] - def minor_version(self, best=False): - # type: (bool) -> str + def minor_version(self, best: bool = False) -> str: """ Return the minor version number of the current distribution. @@ -913,8 +926,7 @@ def minor_version(self, best=False): """ return self.version_parts(best)[1] - def build_number(self, best=False): - # type: (bool) -> str + def build_number(self, best: bool = False) -> str: """ Return the build number of the current distribution. @@ -922,8 +934,7 @@ def build_number(self, best=False): """ return self.version_parts(best)[2] - def like(self): - # type: () -> str + def like(self) -> str: """ Return the IDs of distributions that are like the OS distribution. @@ -931,8 +942,7 @@ def like(self): """ return self.os_release_attr("id_like") or "" - def codename(self): - # type: () -> str + def codename(self) -> str: """ Return the codename of the OS distribution. @@ -949,8 +959,7 @@ def codename(self): or "" ) - def info(self, pretty=False, best=False): - # type: (bool, bool) -> InfoDict + def info(self, pretty: bool = False, best: bool = False) -> InfoDict: """ Return certain machine-readable information about the OS distribution. 
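

Worth pausing on the version() hunk above: on AIX, the new code inserts oslevel output at the front of the candidate list, so it wins over os-release, lsb_release, the distro release file, and uname. A paraphrase of that priority logic, as a sketch over a duck-typed dist object rather than the vendored method itself:

    def candidate_versions(dist):
        # Priority order mirrors LinuxDistribution.version() after this patch.
        versions = [
            dist.os_release_attr("version_id"),
            dist.lsb_release_attr("release"),
            dist.distro_release_attr("version_id"),
            dist.uname_attr("release"),
        ]
        if dist.uname_attr("id").startswith("aix"):
            versions.insert(0, dist.oslevel_info())  # AIX: oslevel wins
        return versions
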
@@ -969,8 +978,7 @@ def info(self, pretty=False, best=False): codename=self.codename(), ) - def os_release_info(self): - # type: () -> Dict[str, str] + def os_release_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the os-release file data source of the OS distribution. @@ -979,8 +987,7 @@ def os_release_info(self): """ return self._os_release_info - def lsb_release_info(self): - # type: () -> Dict[str, str] + def lsb_release_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the OS @@ -990,8 +997,7 @@ def lsb_release_info(self): """ return self._lsb_release_info - def distro_release_info(self): - # type: () -> Dict[str, str] + def distro_release_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the distro release file data source of the OS @@ -1001,8 +1007,7 @@ def distro_release_info(self): """ return self._distro_release_info - def uname_info(self): - # type: () -> Dict[str, str] + def uname_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the uname command data source of the OS distribution. @@ -1011,8 +1016,13 @@ def uname_info(self): """ return self._uname_info - def os_release_attr(self, attribute): - # type: (str) -> str + def oslevel_info(self) -> str: + """ + Return AIX' oslevel command output. + """ + return self._oslevel_info + + def os_release_attr(self, attribute: str) -> str: """ Return a single named information item from the os-release file data source of the OS distribution. @@ -1021,8 +1031,7 @@ def os_release_attr(self, attribute): """ return self._os_release_info.get(attribute, "") - def lsb_release_attr(self, attribute): - # type: (str) -> str + def lsb_release_attr(self, attribute: str) -> str: """ Return a single named information item from the lsb_release command output data source of the OS distribution. @@ -1031,8 +1040,7 @@ def lsb_release_attr(self, attribute): """ return self._lsb_release_info.get(attribute, "") - def distro_release_attr(self, attribute): - # type: (str) -> str + def distro_release_attr(self, attribute: str) -> str: """ Return a single named information item from the distro release file data source of the OS distribution. @@ -1041,8 +1049,7 @@ def distro_release_attr(self, attribute): """ return self._distro_release_info.get(attribute, "") - def uname_attr(self, attribute): - # type: (str) -> str + def uname_attr(self, attribute: str) -> str: """ Return a single named information item from the uname command output data source of the OS distribution. @@ -1052,8 +1059,7 @@ def uname_attr(self, attribute): return self._uname_info.get(attribute, "") @cached_property - def _os_release_info(self): - # type: () -> Dict[str, str] + def _os_release_info(self) -> Dict[str, str]: """ Get the information items from the specified os-release file. @@ -1061,13 +1067,12 @@ def _os_release_info(self): A dictionary containing all information items. """ if os.path.isfile(self.os_release_file): - with open(self.os_release_file) as release_file: + with open(self.os_release_file, encoding="utf-8") as release_file: return self._parse_os_release_content(release_file) return {} @staticmethod - def _parse_os_release_content(lines): - # type: (TextIO) -> Dict[str, str] + def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: """ Parse the lines of an os-release file. 
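

_parse_os_release_content() operates on any text stream, so the shlex-driven var=value extraction can be exercised without a real /etc/os-release. A simplified reimplementation for illustration (the vendored parser additionally derives codename fields, as the next hunks show):

    import io
    import shlex

    def parse_os_release(text):
        props = {}
        lexer = shlex.shlex(io.StringIO(text), posix=True)
        lexer.whitespace_split = True
        for token in lexer:
            if "=" in token:  # anything else is not a var=value assignment
                key, value = token.split("=", 1)
                props[key.lower()] = value
        return props

    sample = 'NAME="Ubuntu"\nVERSION="20.04.4 LTS (Focal Fossa)"\n'
    print(parse_os_release(sample))
    # -> {'name': 'Ubuntu', 'version': '20.04.4 LTS (Focal Fossa)'}
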
@@ -1084,16 +1089,6 @@ def _parse_os_release_content(lines): lexer = shlex.shlex(lines, posix=True) lexer.whitespace_split = True - # The shlex module defines its `wordchars` variable using literals, - # making it dependent on the encoding of the Python source file. - # In Python 2.6 and 2.7, the shlex source file is encoded in - # 'iso-8859-1', and the `wordchars` variable is defined as a byte - # string. This causes a UnicodeDecodeError to be raised when the - # parsed content is a unicode object. The following fix resolves that - # (... but it should be fixed in shlex...): - if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): - lexer.wordchars = lexer.wordchars.decode("iso-8859-1") - tokens = list(lexer) for token in tokens: # At this point, all shell-like parsing has been done (i.e. @@ -1102,12 +1097,17 @@ def _parse_os_release_content(lines): # stripped, etc.), so the tokens are now either: # * variable assignments: var=value # * commands or their arguments (not allowed in os-release) + # Ignore any tokens that are not variable assignments if "=" in token: k, v = token.split("=", 1) props[k.lower()] = v - else: - # Ignore any tokens that are not variable assignments - pass + + if "version" in props: + # extract release codename (if any) from version attribute + match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) + if match: + release_codename = match.group(1) or match.group(2) + props["codename"] = props["release_codename"] = release_codename if "version_codename" in props: # os-release added a version_codename field. Use that in @@ -1118,22 +1118,11 @@ def _parse_os_release_content(lines): elif "ubuntu_codename" in props: # Same as above but a non-standard field name used on older Ubuntus props["codename"] = props["ubuntu_codename"] - elif "version" in props: - # If there is no version_codename, parse it from the version - match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"]) - if match: - codename = match.group() - codename = codename.strip("()") - codename = codename.strip(",") - codename = codename.strip() - # codename appears within paranthese. - props["codename"] = codename return props @cached_property - def _lsb_release_info(self): - # type: () -> Dict[str, str] + def _lsb_release_info(self) -> Dict[str, str]: """ Get the information items from the lsb_release command output. @@ -1142,19 +1131,17 @@ def _lsb_release_info(self): """ if not self.include_lsb: return {} - with open(os.devnull, "wb") as devnull: - try: - cmd = ("lsb_release", "-a") - stdout = subprocess.check_output(cmd, stderr=devnull) - # Command not found or lsb_release returned error - except (OSError, subprocess.CalledProcessError): - return {} + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} content = self._to_str(stdout).splitlines() return self._parse_lsb_release_content(content) @staticmethod - def _parse_lsb_release_content(lines): - # type: (Iterable[str]) -> Dict[str, str] + def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: """ Parse the output of the lsb_release command. 
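

The codename rework above is the one real parsing change in this file: rather than stripping parentheses and commas by hand, the new code captures either a parenthesized codename or a comma-separated trailer in a single regex, storing the result under both codename and release_codename. Checking that pattern against the two shapes it targets:

    import re

    def release_codename(version_value):
        # Same regex as the new _parse_os_release_content() hunk above.
        match = re.search(r"\((\D+)\)|,\s*(\D+)", version_value)
        return (match.group(1) or match.group(2)) if match else None

    print(release_codename("16.04.3 LTS (Xenial Xerus)"))  # -> Xenial Xerus
    print(release_codename("7.4.1708, Core"))              # -> Core
    print(release_codename("35"))                          # -> None (no codename)
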
@@ -1178,20 +1165,31 @@ def _parse_lsb_release_content(lines): return props @cached_property - def _uname_info(self): - # type: () -> Dict[str, str] - with open(os.devnull, "wb") as devnull: - try: - cmd = ("uname", "-rs") - stdout = subprocess.check_output(cmd, stderr=devnull) - except OSError: - return {} + def _uname_info(self) -> Dict[str, str]: + if not self.include_uname: + return {} + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + except OSError: + return {} content = self._to_str(stdout).splitlines() return self._parse_uname_content(content) + @cached_property + def _oslevel_info(self) -> str: + if not self.include_oslevel: + return "" + try: + stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) + except (OSError, subprocess.CalledProcessError): + return "" + return self._to_str(stdout).strip() + @staticmethod - def _parse_uname_content(lines): - # type: (Sequence[str]) -> Dict[str, str] + def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: + if not lines: + return {} props = {} match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) if match: @@ -1208,23 +1206,12 @@ def _parse_uname_content(lines): return props @staticmethod - def _to_str(text): - # type: (Union[bytes, str]) -> str + def _to_str(bytestring: bytes) -> str: encoding = sys.getfilesystemencoding() - encoding = "utf-8" if encoding == "ascii" else encoding - - if sys.version_info[0] >= 3: - if isinstance(text, bytes): - return text.decode(encoding) - else: - if isinstance(text, unicode): # noqa - return text.encode(encoding) - - return text + return bytestring.decode(encoding) @cached_property - def _distro_release_info(self): - # type: () -> Dict[str, str] + def _distro_release_info(self) -> Dict[str, str]: """ Get the information items from the specified distro release file. @@ -1272,6 +1259,7 @@ def _distro_release_info(self): "manjaro-release", "oracle-release", "redhat-release", + "rocky-release", "sl-release", "slackware-version", ] @@ -1291,8 +1279,7 @@ def _distro_release_info(self): return distro_info return {} - def _parse_distro_release_file(self, filepath): - # type: (str) -> Dict[str, str] + def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: """ Parse a distro release file. @@ -1304,19 +1291,18 @@ def _parse_distro_release_file(self, filepath): A dictionary containing all information items. """ try: - with open(filepath) as fp: + with open(filepath, encoding="utf-8") as fp: # Only parse the first line. For instance, on SLES there # are multiple lines. We don't want them... return self._parse_distro_release_content(fp.readline()) - except (OSError, IOError): + except OSError: # Ignore not being able to read a specific, seemingly version # related file. # See https://github.com/python-distro/distro/issues/162 return {} @staticmethod - def _parse_distro_release_content(line): - # type: (str) -> Dict[str, str] + def _parse_distro_release_content(line: str) -> Dict[str, str]: """ Parse a line from a distro release file. 
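

A simplification recurs through these hunks: every external command now routes stderr to subprocess.DEVNULL instead of a hand-opened os.devnull handle, and _to_str() shrinks to a plain bytes decode now that Python 2 is gone. The common shape of the command helpers, sketched below (the vendored versions differ slightly in which exceptions they swallow):

    import subprocess
    import sys

    def run_quiet(cmd):
        # A missing binary raises OSError; a failing one, CalledProcessError.
        try:
            stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        except (OSError, subprocess.CalledProcessError):
            return ""
        return stdout.decode(sys.getfilesystemencoding())  # as in the new _to_str()

    print(run_quiet(("uname", "-rs")))  # e.g. "Linux 5.15.0-41-generic\n" on Linux
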
@@ -1344,8 +1330,7 @@ def _parse_distro_release_content(line): _distro = LinuxDistribution() -def main(): - # type: () -> None +def main() -> None: logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) logger.addHandler(logging.StreamHandler(sys.stdout)) @@ -1367,7 +1352,10 @@ def main(): if args.root_dir: dist = LinuxDistribution( - include_lsb=False, include_uname=False, root_dir=args.root_dir + include_lsb=False, + include_uname=False, + include_oslevel=False, + root_dir=args.root_dir, ) else: dist = _distro diff --git a/pipenv/patched/notpip/_vendor/html5lib/LICENSE b/pipenv/patched/notpip/_vendor/html5lib/LICENSE new file mode 100644 index 0000000000..c87fa7a000 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/html5lib/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2006-2013 James Graham and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/idna/LICENSE.md b/pipenv/patched/notpip/_vendor/idna/LICENSE.md new file mode 100644 index 0000000000..b6f87326ff --- /dev/null +++ b/pipenv/patched/notpip/_vendor/idna/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2013-2021, Kim Davies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/patched/notpip/_vendor/msgpack/COPYING b/pipenv/patched/notpip/_vendor/msgpack/COPYING new file mode 100644 index 0000000000..f067af3aae --- /dev/null +++ b/pipenv/patched/notpip/_vendor/msgpack/COPYING @@ -0,0 +1,14 @@ +Copyright (C) 2008-2011 INADA Naoki + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/pipenv/patched/notpip/_vendor/packaging/LICENSE b/pipenv/patched/notpip/_vendor/packaging/LICENSE new file mode 100644 index 0000000000..6f62d44e4e --- /dev/null +++ b/pipenv/patched/notpip/_vendor/packaging/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE b/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE new file mode 100644 index 0000000000..f433b1a53f --- /dev/null +++ b/pipenv/patched/notpip/_vendor/packaging/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/pipenv/patched/notpip/_vendor/packaging/LICENSE.BSD b/pipenv/patched/notpip/_vendor/packaging/LICENSE.BSD new file mode 100644 index 0000000000..42ce7b75c9 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/packaging/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
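

Before the remaining license files, one behavioral note on the LinuxDistribution.__init__ hunk earlier in this patch: combining root_dir with any subprocess-backed include_* flag now raises ValueError rather than silently mixing host data into an inspected tree. A quick sketch of the new contract (vendored import path taken from the diff headers; /mnt/image is a made-up path):

    from pipenv.patched.notpip._vendor.distro import LinuxDistribution

    # Allowed: root_dir alone implicitly disables lsb_release, uname, oslevel.
    dist = LinuxDistribution(root_dir="/mnt/image")

    # Rejected: an explicit subprocess data source alongside root_dir.
    try:
        LinuxDistribution(root_dir="/mnt/image", include_lsb=True)
    except ValueError as exc:
        print(exc)  # Including subprocess data sources from specific root_dir ...
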
diff --git a/pipenv/patched/notpip/_vendor/pep517/LICENSE b/pipenv/patched/notpip/_vendor/pep517/LICENSE new file mode 100644 index 0000000000..b0ae9dbc26 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/pep517/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 Thomas Kluyver + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/pkg_resources/LICENSE b/pipenv/patched/notpip/_vendor/pkg_resources/LICENSE new file mode 100644 index 0000000000..6e0693b4b0 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/pkg_resources/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2016 Jason R Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
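

The platformdirs diff below replaces the importlib-based backend lookup with direct imports and makes Android a refinement of the Unix case: it is chosen only when the Android environment variables are present, no login SHELL is set, and _android_folder() actually resolves (it now returns None instead of raising). A paraphrase of the resulting decision tree, not the vendored function:

    import os
    import sys

    def backend_name():
        if sys.platform == "win32":
            name = "Windows"
        elif sys.platform == "darwin":
            name = "MacOS"
        else:
            name = "Unix"
        if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
            # A login shell (e.g. Termux) keeps the Unix backend; otherwise the
            # Android backend is used once its app folder can be located.
            if os.getenv("SHELL") is None:
                name = "Android"
        return name

    print(backend_name())  # e.g. "Unix" on a typical Linux host
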
diff --git a/pipenv/patched/notpip/_vendor/platformdirs/__init__.py b/pipenv/patched/notpip/_vendor/platformdirs/__init__.py index 7749e5e119..7a7c32c699 100644 --- a/pipenv/patched/notpip/_vendor/platformdirs/__init__.py +++ b/pipenv/patched/notpip/_vendor/platformdirs/__init__.py @@ -4,7 +4,6 @@ """ from __future__ import annotations -import importlib import os import sys from pathlib import Path @@ -18,16 +17,26 @@ def _set_platform_dir_class() -> type[PlatformDirsABC]: - if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system": - module, name = "pipenv.patched.notpip._vendor.platformdirs.android", "Android" - elif sys.platform == "win32": - module, name = "pipenv.patched.notpip._vendor.platformdirs.windows", "Windows" + if sys.platform == "win32": + from pipenv.patched.notpip._vendor.platformdirs.windows import Windows as Result elif sys.platform == "darwin": - module, name = "pipenv.patched.notpip._vendor.platformdirs.macos", "MacOS" + from pipenv.patched.notpip._vendor.platformdirs.macos import MacOS as Result else: - module, name = "pipenv.patched.notpip._vendor.platformdirs.unix", "Unix" - result: type[PlatformDirsABC] = getattr(importlib.import_module(module), name) - return result + from pipenv.patched.notpip._vendor.platformdirs.unix import Unix as Result + + if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system": + + if os.getenv("SHELL") is not None: + return Result + + from pipenv.patched.notpip._vendor.platformdirs.android import _android_folder + + if _android_folder() is not None: + from pipenv.patched.notpip._vendor.platformdirs.android import Android + + return Android # return to avoid redefinition of result + + return Result PlatformDirs = _set_platform_dir_class() #: Currently active platform diff --git a/pipenv/patched/notpip/_vendor/platformdirs/android.py b/pipenv/patched/notpip/_vendor/platformdirs/android.py index a68405871f..eda8093512 100644 --- a/pipenv/patched/notpip/_vendor/platformdirs/android.py +++ b/pipenv/patched/notpip/_vendor/platformdirs/android.py @@ -4,6 +4,7 @@ import re import sys from functools import lru_cache +from typing import cast from .api import PlatformDirsABC @@ -18,7 +19,7 @@ class Android(PlatformDirsABC): @property def user_data_dir(self) -> str: """:return: data directory tied to the user, e.g. ``/data/user///files/``""" - return self._append_app_name_and_version(_android_folder(), "files") + return self._append_app_name_and_version(cast(str, _android_folder()), "files") @property def site_data_dir(self) -> str: @@ -30,7 +31,7 @@ def user_config_dir(self) -> str: """ :return: config directory tied to the user, e.g. ``/data/user///shared_prefs/`` """ - return self._append_app_name_and_version(_android_folder(), "shared_prefs") + return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs") @property def site_config_dir(self) -> str: @@ -40,7 +41,7 @@ def site_config_dir(self) -> str: @property def user_cache_dir(self) -> str: """:return: cache directory tied to the user, e.g. e.g. 
``/data/user///cache/``""" - return self._append_app_name_and_version(_android_folder(), "cache") + return self._append_app_name_and_version(cast(str, _android_folder()), "cache") @property def user_state_dir(self) -> str: @@ -78,14 +79,14 @@ def user_runtime_dir(self) -> str: @lru_cache(maxsize=1) -def _android_folder() -> str: - """:return: base folder for the Android OS""" +def _android_folder() -> str | None: + """:return: base folder for the Android OS or None if cannot be found""" try: # First try to get path to android app via pyjnius from jnius import autoclass Context = autoclass("android.content.Context") # noqa: N806 - result: str = Context.getFilesDir().getParentFile().getAbsolutePath() + result: str | None = Context.getFilesDir().getParentFile().getAbsolutePath() except Exception: # if fails find an android folder looking path on the sys.path pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files") @@ -94,7 +95,7 @@ def _android_folder() -> str: result = path.split("/files")[0] break else: - raise OSError("Cannot find path to android app folder") + result = None return result diff --git a/pipenv/patched/notpip/_vendor/platformdirs/version.py b/pipenv/patched/notpip/_vendor/platformdirs/version.py index 175ded8561..4552c02aff 100644 --- a/pipenv/patched/notpip/_vendor/platformdirs/version.py +++ b/pipenv/patched/notpip/_vendor/platformdirs/version.py @@ -1,4 +1,4 @@ -""" Version information """ +"""Version information""" -__version__ = "2.4.1" -__version_info__ = (2, 4, 1) +__version__ = "2.5.2" +__version_info__ = (2, 5, 2) diff --git a/pipenv/patched/notpip/_vendor/progress/__init__.py b/pipenv/patched/notpip/_vendor/progress/__init__.py deleted file mode 100644 index b434b300ad..0000000000 --- a/pipenv/patched/notpip/_vendor/progress/__init__.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright (c) 2012 Georgios Verigakis -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -from __future__ import division, print_function - -from collections import deque -from datetime import timedelta -from math import ceil -from sys import stderr -try: - from time import monotonic -except ImportError: - from time import time as monotonic - - -__version__ = '1.6' - -HIDE_CURSOR = '\x1b[?25l' -SHOW_CURSOR = '\x1b[?25h' - - -class Infinite(object): - file = stderr - sma_window = 10 # Simple Moving Average window - check_tty = True - hide_cursor = True - - def __init__(self, message='', **kwargs): - self.index = 0 - self.start_ts = monotonic() - self.avg = 0 - self._avg_update_ts = self.start_ts - self._ts = self.start_ts - self._xput = deque(maxlen=self.sma_window) - for key, val in kwargs.items(): - setattr(self, key, val) - - self._max_width = 0 - self._hidden_cursor = False - self.message = message - - if self.file and self.is_tty(): - if self.hide_cursor: - print(HIDE_CURSOR, end='', file=self.file) - self._hidden_cursor = True - self.writeln('') - - def __del__(self): - if self._hidden_cursor: - print(SHOW_CURSOR, end='', file=self.file) - - def __getitem__(self, key): - if key.startswith('_'): - return None - return getattr(self, key, None) - - @property - def elapsed(self): - return int(monotonic() - self.start_ts) - - @property - def elapsed_td(self): - return timedelta(seconds=self.elapsed) - - def update_avg(self, n, dt): - if n > 0: - xput_len = len(self._xput) - self._xput.append(dt / n) - now = monotonic() - # update when we're still filling _xput, then after every second - if (xput_len < self.sma_window or - now - self._avg_update_ts > 1): - self.avg = sum(self._xput) / len(self._xput) - self._avg_update_ts = now - - def update(self): - pass - - def start(self): - pass - - def writeln(self, line): - if self.file and self.is_tty(): - width = len(line) - if width < self._max_width: - # Add padding to cover previous contents - line += ' ' * (self._max_width - width) - else: - self._max_width = width - print('\r' + line, end='', file=self.file) - self.file.flush() - - def finish(self): - if self.file and self.is_tty(): - print(file=self.file) - if self._hidden_cursor: - print(SHOW_CURSOR, end='', file=self.file) - self._hidden_cursor = False - - def is_tty(self): - try: - return self.file.isatty() if self.check_tty else True - except AttributeError: - msg = "%s has no attribute 'isatty'. Try setting check_tty=False." 
% self - raise AttributeError(msg) - - def next(self, n=1): - now = monotonic() - dt = now - self._ts - self.update_avg(n, dt) - self._ts = now - self.index = self.index + n - self.update() - - def iter(self, it): - self.iter_value = None - with self: - for x in it: - self.iter_value = x - yield x - self.next() - del self.iter_value - - def __enter__(self): - self.start() - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.finish() - - -class Progress(Infinite): - def __init__(self, *args, **kwargs): - super(Progress, self).__init__(*args, **kwargs) - self.max = kwargs.get('max', 100) - - @property - def eta(self): - return int(ceil(self.avg * self.remaining)) - - @property - def eta_td(self): - return timedelta(seconds=self.eta) - - @property - def percent(self): - return self.progress * 100 - - @property - def progress(self): - if self.max == 0: - return 0 - return min(1, self.index / self.max) - - @property - def remaining(self): - return max(self.max - self.index, 0) - - def start(self): - self.update() - - def goto(self, index): - incr = index - self.index - self.next(incr) - - def iter(self, it): - try: - self.max = len(it) - except TypeError: - pass - - self.iter_value = None - with self: - for x in it: - self.iter_value = x - yield x - self.next() - del self.iter_value diff --git a/pipenv/patched/notpip/_vendor/progress/bar.py b/pipenv/patched/notpip/_vendor/progress/bar.py deleted file mode 100644 index df4e8b61f8..0000000000 --- a/pipenv/patched/notpip/_vendor/progress/bar.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2012 Georgios Verigakis -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from __future__ import unicode_literals - -import sys - -from . 
import Progress -from .colors import color - - -class Bar(Progress): - width = 32 - suffix = '%(index)d/%(max)d' - bar_prefix = ' |' - bar_suffix = '| ' - empty_fill = ' ' - fill = '#' - color = None - - def update(self): - filled_length = int(self.width * self.progress) - empty_length = self.width - filled_length - - message = self.message % self - bar = color(self.fill * filled_length, fg=self.color) - empty = self.empty_fill * empty_length - suffix = self.suffix % self - line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix, - suffix]) - self.writeln(line) - - -class ChargingBar(Bar): - suffix = '%(percent)d%%' - bar_prefix = ' ' - bar_suffix = ' ' - empty_fill = '∙' - fill = '█' - - -class FillingSquaresBar(ChargingBar): - empty_fill = '▢' - fill = '▣' - - -class FillingCirclesBar(ChargingBar): - empty_fill = '◯' - fill = '◉' - - -class IncrementalBar(Bar): - if sys.platform.startswith('win'): - phases = (u' ', u'▌', u'█') - else: - phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█') - - def update(self): - nphases = len(self.phases) - filled_len = self.width * self.progress - nfull = int(filled_len) # Number of full chars - phase = int((filled_len - nfull) * nphases) # Phase of last char - nempty = self.width - nfull # Number of empty chars - - message = self.message % self - bar = color(self.phases[-1] * nfull, fg=self.color) - current = self.phases[phase] if phase > 0 else '' - empty = self.empty_fill * max(0, nempty - len(current)) - suffix = self.suffix % self - line = ''.join([message, self.bar_prefix, bar, current, empty, - self.bar_suffix, suffix]) - self.writeln(line) - - -class PixelBar(IncrementalBar): - phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿') - - -class ShadyBar(IncrementalBar): - phases = (' ', '░', '▒', '▓', '█') diff --git a/pipenv/patched/notpip/_vendor/progress/colors.py b/pipenv/patched/notpip/_vendor/progress/colors.py deleted file mode 100644 index 4e770f868b..0000000000 --- a/pipenv/patched/notpip/_vendor/progress/colors.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2020 Georgios Verigakis -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -from functools import partial - - -COLORS = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', - 'white') -STYLES = ('bold', 'faint', 'italic', 'underline', 'blink', 'blink2', - 'negative', 'concealed', 'crossed') - - -def color(s, fg=None, bg=None, style=None): - sgr = [] - - if fg: - if fg in COLORS: - sgr.append(str(30 + COLORS.index(fg))) - elif isinstance(fg, int) and 0 <= fg <= 255: - sgr.append('38;5;%d' % int(fg)) - else: - raise Exception('Invalid color "%s"' % fg) - - if bg: - if bg in COLORS: - sgr.append(str(40 + COLORS.index(bg))) - elif isinstance(bg, int) and 0 <= bg <= 255: - sgr.append('48;5;%d' % bg) - else: - raise Exception('Invalid color "%s"' % bg) - - if style: - for st in style.split('+'): - if st in STYLES: - sgr.append(str(1 + STYLES.index(st))) - else: - raise Exception('Invalid style "%s"' % st) - - if sgr: - prefix = '\x1b[' + ';'.join(sgr) + 'm' - suffix = '\x1b[0m' - return prefix + s + suffix - else: - return s - - -# Foreground shortcuts -black = partial(color, fg='black') -red = partial(color, fg='red') -green = partial(color, fg='green') -yellow = partial(color, fg='yellow') -blue = partial(color, fg='blue') -magenta = partial(color, fg='magenta') -cyan = partial(color, fg='cyan') -white = partial(color, fg='white') - -# Style shortcuts -bold = partial(color, style='bold') -faint = partial(color, style='faint') -italic = partial(color, style='italic') -underline = partial(color, style='underline') -blink = partial(color, style='blink') -blink2 = partial(color, style='blink2') -negative = partial(color, style='negative') -concealed = partial(color, style='concealed') -crossed = partial(color, style='crossed') diff --git a/pipenv/patched/notpip/_vendor/progress/counter.py b/pipenv/patched/notpip/_vendor/progress/counter.py deleted file mode 100644 index d0fbe7ef35..0000000000 --- a/pipenv/patched/notpip/_vendor/progress/counter.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2012 Georgios Verigakis -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from __future__ import unicode_literals -from . 
import Infinite, Progress - - -class Counter(Infinite): - def update(self): - message = self.message % self - line = ''.join([message, str(self.index)]) - self.writeln(line) - - -class Countdown(Progress): - def update(self): - message = self.message % self - line = ''.join([message, str(self.remaining)]) - self.writeln(line) - - -class Stack(Progress): - phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█') - - def update(self): - nphases = len(self.phases) - i = min(nphases - 1, int(self.progress * nphases)) - message = self.message % self - line = ''.join([message, self.phases[i]]) - self.writeln(line) - - -class Pie(Stack): - phases = ('○', '◔', '◑', '◕', '●') diff --git a/pipenv/patched/notpip/_vendor/progress/spinner.py b/pipenv/patched/notpip/_vendor/progress/spinner.py deleted file mode 100644 index d593a203e0..0000000000 --- a/pipenv/patched/notpip/_vendor/progress/spinner.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright (c) 2012 Georgios Verigakis -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from __future__ import unicode_literals -from . import Infinite - - -class Spinner(Infinite): - phases = ('-', '\\', '|', '/') - hide_cursor = True - - def update(self): - i = self.index % len(self.phases) - message = self.message % self - line = ''.join([message, self.phases[i]]) - self.writeln(line) - - -class PieSpinner(Spinner): - phases = ['◷', '◶', '◵', '◴'] - - -class MoonSpinner(Spinner): - phases = ['◑', '◒', '◐', '◓'] - - -class LineSpinner(Spinner): - phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻'] - - -class PixelSpinner(Spinner): - phases = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] diff --git a/pipenv/patched/notpip/_vendor/pygments/LICENSE b/pipenv/patched/notpip/_vendor/pygments/LICENSE new file mode 100644 index 0000000000..e1b15663d9 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/pygments/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2006-2021 by the respective authors (see AUTHORS file). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
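The Counter, Stack, Pie, and Spinner widgets deleted above all share one idea: update() maps the current state to a single glyph from a phases tuple, with spinners cycling by modulus and progress-driven widgets clamping the scaled ratio into range. A compact standalone illustration (no pip/pipenv API involved):

SPINNER_PHASES = ("-", "\\", "|", "/")
STACK_PHASES = (" ", "▁", "▂", "▃", "▄", "▅", "▆", "▇", "█")

def spinner_frame(index: int) -> str:
    return SPINNER_PHASES[index % len(SPINNER_PHASES)]   # endless cycle

def stack_frame(progress: float) -> str:
    # min() keeps progress == 1.0 from indexing past the last phase
    i = min(len(STACK_PHASES) - 1, int(progress * len(STACK_PHASES)))
    return STACK_PHASES[i]

print("".join(spinner_frame(i) for i in range(8)))       # -\|/-\|/
print(stack_frame(0.0), stack_frame(0.5), stack_frame(1.0))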
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/patched/notpip/pyparsing.LICENSE b/pipenv/patched/notpip/_vendor/pyparsing/LICENSE similarity index 100% rename from pipenv/patched/notpip/pyparsing.LICENSE rename to pipenv/patched/notpip/_vendor/pyparsing/LICENSE diff --git a/pipenv/patched/notpip/_vendor/pyparsing/__init__.py b/pipenv/patched/notpip/_vendor/pyparsing/__init__.py index 9ea189a84d..c69eb53bd7 100644 --- a/pipenv/patched/notpip/_vendor/pyparsing/__init__.py +++ b/pipenv/patched/notpip/_vendor/pyparsing/__init__.py @@ -1,6 +1,6 @@ # module pyparsing.py # -# Copyright (c) 2003-2021 Paul T. McGuire +# Copyright (c) 2003-2022 Paul T. McGuire # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -105,14 +105,17 @@ class version_info(NamedTuple): @property def __version__(self): - return "{}.{}.{}".format(self.major, self.minor, self.micro) + ( - "{}{}{}".format( - "r" if self.releaselevel[0] == "c" else "", - self.releaselevel[0], - self.serial, - ), - "", - )[self.releaselevel == "final"] + return ( + "{}.{}.{}".format(self.major, self.minor, self.micro) + + ( + "{}{}{}".format( + "r" if self.releaselevel[0] == "c" else "", + self.releaselevel[0], + self.serial, + ), + "", + )[self.releaselevel == "final"] + ) def __str__(self): return "{} {} / {}".format(__name__, self.__version__, __version_time__) @@ -125,8 +128,8 @@ def __repr__(self): ) -__version_info__ = version_info(3, 0, 7, "final", 0) -__version_time__ = "15 Jan 2022 04:10 UTC" +__version_info__ = version_info(3, 0, 8, "final", 0) +__version_time__ = "09 Apr 2022 23:29 UTC" __version__ = __version_info__.__version__ __versionTime__ = __version_time__ __author__ = "Paul McGuire " diff --git a/pipenv/patched/notpip/_vendor/pyparsing/core.py b/pipenv/patched/notpip/_vendor/pyparsing/core.py index 24672872b0..6969649260 100644 --- a/pipenv/patched/notpip/_vendor/pyparsing/core.py +++ b/pipenv/patched/notpip/_vendor/pyparsing/core.py @@ -23,7 +23,6 @@ import copy import warnings import re -import sre_constants import sys from collections.abc import Iterable import traceback @@ -53,7 +52,7 @@ str_type: Tuple[type, ...] = (str, bytes) # -# Copyright (c) 2003-2021 Paul T. McGuire +# Copyright (c) 2003-2022 Paul T. 
McGuire # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the @@ -76,6 +75,19 @@ # +if sys.version_info >= (3, 8): + from functools import cached_property +else: + + class cached_property: + def __init__(self, func): + self._func = func + + def __get__(self, instance, owner=None): + ret = instance.__dict__[self._func.__name__] = self._func(instance) + return ret + + class __compat__(__config_flags): """ A cross-version compatibility configuration for pyparsing features that will be @@ -246,10 +258,10 @@ def _should_enable_warnings( alphanums = alphas + nums printables = "".join([c for c in string.printable if c not in string.whitespace]) -_trim_arity_call_line = None +_trim_arity_call_line: traceback.StackSummary = None -def _trim_arity(func, maxargs=2): +def _trim_arity(func, max_limit=3): """decorator to trim function calls to match the arity of the target""" global _trim_arity_call_line @@ -267,16 +279,12 @@ def extract_tb(tb, limit=0): # synthesize what would be returned by traceback.extract_stack at the call to # user's parse action 'func', so that we don't incur call penalty at parse time - LINE_DIFF = 11 + # fmt: off + LINE_DIFF = 7 # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! - _trim_arity_call_line = ( - _trim_arity_call_line or traceback.extract_stack(limit=2)[-1] - ) - pa_call_line_synth = ( - _trim_arity_call_line[0], - _trim_arity_call_line[1] + LINE_DIFF, - ) + _trim_arity_call_line = (_trim_arity_call_line or traceback.extract_stack(limit=2)[-1]) + pa_call_line_synth = (_trim_arity_call_line[0], _trim_arity_call_line[1] + LINE_DIFF) def wrapper(*args): nonlocal found_arity, limit @@ -297,16 +305,18 @@ def wrapper(*args): del tb if trim_arity_type_error: - if limit <= maxargs: + if limit < max_limit: limit += 1 continue raise + # fmt: on # copy func name to wrapper for sensible debug output # (can't use functools.wraps, since that messes with function signature) func_name = getattr(func, "__name__", getattr(func, "__class__").__name__) wrapper.__name__ = func_name + wrapper.__doc__ = func.__doc__ return wrapper @@ -467,7 +477,6 @@ def __init__(self, savelist: bool = False): self.modalResults = True # custom debug actions self.debugActions = self.DebugActions(None, None, None) - self.re = None # avoid redundant calls to preParse self.callPreparse = True self.callDuringTry = False @@ -1342,7 +1351,7 @@ def split( last = e yield instring[last:] - def __add__(self, other): + def __add__(self, other) -> "ParserElement": """ Implementation of ``+`` operator - returns :class:`And`. Adding strings to a :class:`ParserElement` converts them to :class:`Literal`s by default. 
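The cached_property shim added above (used by the Regex class later in this file) is a non-data descriptor: its first __get__ runs the wrapped function once and plants the result in the instance __dict__, which shadows the descriptor on every later lookup. On Python 3.8+ pyparsing uses functools.cached_property, which behaves the same way; the 3.7 shim simply keys the cache on func.__name__ instead of using __set_name__. A quick standalone demonstration:

import functools

class Circle:
    def __init__(self, radius: float) -> None:
        self.radius = radius

    @functools.cached_property  # the 3.7 shim above has the same read path
    def area(self) -> float:
        print("computing area once")
        return 3.141592653589793 * self.radius ** 2

c = Circle(2.0)
c.area               # prints, then stores the float in c.__dict__["area"]
c.area               # instance dict now shadows the descriptor; no recomputation
assert "area" in c.__dict__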
@@ -1382,7 +1391,7 @@ def __add__(self, other): ) return And([self, other]) - def __radd__(self, other): + def __radd__(self, other) -> "ParserElement": """ Implementation of ``+`` operator when left operand is not a :class:`ParserElement` """ @@ -1399,7 +1408,7 @@ def __radd__(self, other): ) return other + self - def __sub__(self, other): + def __sub__(self, other) -> "ParserElement": """ Implementation of ``-`` operator, returns :class:`And` with error stop """ @@ -1413,7 +1422,7 @@ def __sub__(self, other): ) return self + And._ErrorStop() + other - def __rsub__(self, other): + def __rsub__(self, other) -> "ParserElement": """ Implementation of ``-`` operator when left operand is not a :class:`ParserElement` """ @@ -1427,7 +1436,7 @@ def __rsub__(self, other): ) return other - self - def __mul__(self, other): + def __mul__(self, other) -> "ParserElement": """ Implementation of ``*`` operator, allows use of ``expr * 3`` in place of ``expr + expr + expr``. Expressions may also be multiplied by a 2-integer @@ -1513,10 +1522,10 @@ def makeOptionalList(n): ret = And([self] * minElements) return ret - def __rmul__(self, other): + def __rmul__(self, other) -> "ParserElement": return self.__mul__(other) - def __or__(self, other): + def __or__(self, other) -> "ParserElement": """ Implementation of ``|`` operator - returns :class:`MatchFirst` """ @@ -1533,7 +1542,7 @@ def __or__(self, other): ) return MatchFirst([self, other]) - def __ror__(self, other): + def __ror__(self, other) -> "ParserElement": """ Implementation of ``|`` operator when left operand is not a :class:`ParserElement` """ @@ -1547,7 +1556,7 @@ def __ror__(self, other): ) return other | self - def __xor__(self, other): + def __xor__(self, other) -> "ParserElement": """ Implementation of ``^`` operator - returns :class:`Or` """ @@ -1561,7 +1570,7 @@ def __xor__(self, other): ) return Or([self, other]) - def __rxor__(self, other): + def __rxor__(self, other) -> "ParserElement": """ Implementation of ``^`` operator when left operand is not a :class:`ParserElement` """ @@ -1575,7 +1584,7 @@ def __rxor__(self, other): ) return other ^ self - def __and__(self, other): + def __and__(self, other) -> "ParserElement": """ Implementation of ``&`` operator - returns :class:`Each` """ @@ -1589,7 +1598,7 @@ def __and__(self, other): ) return Each([self, other]) - def __rand__(self, other): + def __rand__(self, other) -> "ParserElement": """ Implementation of ``&`` operator when left operand is not a :class:`ParserElement` """ @@ -1603,7 +1612,7 @@ def __rand__(self, other): ) return other & self - def __invert__(self): + def __invert__(self) -> "ParserElement": """ Implementation of ``~`` operator - returns :class:`NotAny` """ @@ -1653,7 +1662,7 @@ def __getitem__(self, key): ret = self * tuple(key[:2]) return ret - def __call__(self, name: str = None): + def __call__(self, name: str = None) -> "ParserElement": """ Shortcut for :class:`set_results_name`, with ``list_all_matches=False``. @@ -2140,6 +2149,7 @@ def create_diagram( output_html: Union[TextIO, Path, str], vertical: int = 3, show_results_names: bool = False, + show_groups: bool = False, **kwargs, ) -> None: """ @@ -2152,7 +2162,7 @@ def create_diagram( instead of horizontally (default=3) - show_results_names - bool flag whether diagram should show annotations for defined results names - + - show_groups - bool flag whether groups should be highlighted with an unlabeled surrounding box Additional diagram-formatting keyword arguments can also be included; see railroad.Diagram class. 
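The show_groups flag introduced here threads from ParserElement.create_diagram() through to_railroad() into the element converter, where Group elements become unlabeled AnnotatedItem boxes. A hypothetical usage sketch against the vendored module (the diagram machinery additionally needs the optional railroad-diagrams and jinja2 packages, which this patch does not provide):

from pipenv.patched.notpip._vendor import pyparsing as pp

integer = pp.Word(pp.nums)
date = pp.Group(integer("year") + "/" + integer("month") + "/" + integer("day"))

# show_groups=True highlights each Group with an unlabeled surrounding box
date.create_diagram("date_diagram.html", show_results_names=True, show_groups=True)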
""" @@ -2170,6 +2180,7 @@ def create_diagram( self, vertical=vertical, show_results_names=show_results_names, + show_groups=show_groups, diagram_kwargs=kwargs, ) if isinstance(output_html, (str, Path)): @@ -2219,7 +2230,7 @@ def __init__(self, expr: ParserElement, must_skip: bool = False): def _generateDefaultName(self): return str(self.anchor + Empty()).replace("Empty", "...") - def __add__(self, other): + def __add__(self, other) -> "ParserElement": skipper = SkipTo(other).set_name("...")("_skipped*") if self.must_skip: @@ -2773,7 +2784,7 @@ def __init__( try: self.re = re.compile(self.reString) - except sre_constants.error: + except re.error: self.re = None else: self.re_match = self.re.match @@ -2926,19 +2937,12 @@ def __init__( if not pattern: raise ValueError("null string passed to Regex; use Empty() instead") - self.pattern = pattern + self._re = None + self.reString = self.pattern = pattern self.flags = flags - try: - self.re = re.compile(self.pattern, self.flags) - self.reString = self.pattern - except sre_constants.error: - raise ValueError( - "invalid pattern ({!r}) passed to Regex".format(pattern) - ) - elif hasattr(pattern, "pattern") and hasattr(pattern, "match"): - self.re = pattern + self._re = pattern self.pattern = self.reString = pattern.pattern self.flags = flags @@ -2947,11 +2951,8 @@ def __init__( "Regex may only be constructed with a string or a compiled RE object" ) - self.re_match = self.re.match - self.errmsg = "Expected " + self.name self.mayIndexError = False - self.mayReturnEmpty = self.re_match("") is not None self.asGroupList = asGroupList self.asMatch = asMatch if self.asGroupList: @@ -2959,6 +2960,26 @@ def __init__( if self.asMatch: self.parseImpl = self.parseImplAsMatch + @cached_property + def re(self): + if self._re: + return self._re + else: + try: + return re.compile(self.pattern, self.flags) + except re.error: + raise ValueError( + "invalid pattern ({!r}) passed to Regex".format(self.pattern) + ) + + @cached_property + def re_match(self): + return self.re.match + + @cached_property + def mayReturnEmpty(self): + return self.re_match("") is not None + def _generateDefaultName(self): return "Re:({})".format(repr(self.pattern).replace("\\\\", "\\")) @@ -3168,7 +3189,7 @@ def __init__( self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern self.re_match = self.re.match - except sre_constants.error: + except re.error: raise ValueError( "invalid pattern {!r} passed to Regex".format(self.pattern) ) @@ -3826,7 +3847,9 @@ def streamline(self) -> ParserElement: seen.add(id(cur)) if isinstance(cur, IndentedBlock): prev.add_parse_action( - lambda s, l, t, cur_=cur: setattr(cur_, "parent_anchor", col(l, s)) + lambda s, l, t, cur_=cur: setattr( + cur_, "parent_anchor", col(l, s) + ) ) break subs = cur.recurse() @@ -5002,20 +5025,20 @@ class SkipTo(ParseElementEnhance): prints:: ['101', 'Critical', 'Intermittent system crash', '6'] - - days_open: 6 - - desc: Intermittent system crash - - issue_num: 101 - - sev: Critical + - days_open: '6' + - desc: 'Intermittent system crash' + - issue_num: '101' + - sev: 'Critical' ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - - days_open: 14 - - desc: Spelling error on Login ('log|n') - - issue_num: 94 - - sev: Cosmetic + - days_open: '14' + - desc: "Spelling error on Login ('log|n')" + - issue_num: '94' + - sev: 'Cosmetic' ['79', 'Minor', 'System slow when running too many reports', '47'] - - days_open: 47 - - desc: System slow when running too many reports - - issue_num: 79 - - sev: Minor + 
- days_open: '47' + - desc: 'System slow when running too many reports' + - issue_num: '79' + - sev: 'Minor' """ def __init__( @@ -5473,10 +5496,10 @@ class Dict(TokenConverter): ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap + - color: 'light blue' + - posn: 'upper left' + - shape: 'SQUARE' + - texture: 'burlap' SQUARE {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} @@ -5564,13 +5587,13 @@ def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): expr = _PendingSkip(NoMatch()) super().__init__(expr) - def __add__(self, other): + def __add__(self, other) -> "ParserElement": if isinstance(self.expr, _PendingSkip): return Suppress(SkipTo(other)) + other else: return super().__add__(other) - def __sub__(self, other): + def __sub__(self, other) -> "ParserElement": if isinstance(self.expr, _PendingSkip): return Suppress(SkipTo(other)) - other else: diff --git a/pipenv/patched/notpip/_vendor/pyparsing/diagram/__init__.py b/pipenv/patched/notpip/_vendor/pyparsing/diagram/__init__.py index 8f90fd5089..22553bea72 100644 --- a/pipenv/patched/notpip/_vendor/pyparsing/diagram/__init__.py +++ b/pipenv/patched/notpip/_vendor/pyparsing/diagram/__init__.py @@ -16,6 +16,7 @@ from io import StringIO import inspect + with open(resource_filename(__name__, "template.jinja2"), encoding="utf-8") as fp: template = Template(fp.read()) @@ -54,7 +55,7 @@ class AnnotatedItem(railroad.Group): """ def __init__(self, label: str, item): - super().__init__(item=item, label="[{}]".format(label)) + super().__init__(item=item, label="[{}]".format(label) if label else label) class EditablePartial(Generic[T]): @@ -137,6 +138,7 @@ def to_railroad( diagram_kwargs: Optional[dict] = None, vertical: int = 3, show_results_names: bool = False, + show_groups: bool = False, ) -> List[NamedDiagram]: """ Convert a pyparsing element tree into a list of diagrams. 
This is the recommended entrypoint to diagram @@ -147,6 +149,8 @@ def to_railroad( shown vertically instead of horizontally :param show_results_names - bool to indicate whether results name annotations should be included in the diagram + :param show_groups - bool to indicate whether groups should be highlighted with an unlabeled + surrounding box """ # Convert the whole tree underneath the root lookup = ConverterState(diagram_kwargs=diagram_kwargs or {}) @@ -156,6 +160,7 @@ def to_railroad( parent=None, vertical=vertical, show_results_names=show_results_names, + show_groups=show_groups, ) root_id = id(element) @@ -362,6 +367,7 @@ def _inner( index: int = 0, name_hint: str = None, show_results_names: bool = False, + show_groups: bool = False, ) -> Optional[EditablePartial]: ret = fn( @@ -372,6 +378,7 @@ def _inner( index, name_hint, show_results_names, + show_groups, ) # apply annotation for results name, if present @@ -411,6 +418,7 @@ def _to_diagram_element( index: int = 0, name_hint: str = None, show_results_names: bool = False, + show_groups: bool = False, ) -> Optional[EditablePartial]: """ Recursively converts a PyParsing Element to a railroad Element @@ -423,6 +431,7 @@ def _to_diagram_element( :param name_hint: If provided, this will override the generated name :param show_results_names: bool flag indicating whether to add annotations for results names :returns: The converted version of the input element, but as a Partial that hasn't yet been constructed + :param show_groups: bool flag indicating whether to show groups using bounding box """ exprs = element.recurse() name = name_hint or element.customName or element.__class__.__name__ @@ -437,7 +446,7 @@ def _to_diagram_element( if isinstance( element, ( - pyparsing.TokenConverter, + # pyparsing.TokenConverter, # pyparsing.Forward, pyparsing.Located, ), @@ -457,6 +466,7 @@ def _to_diagram_element( index=index, name_hint=propagated_name, show_results_names=show_results_names, + show_groups=show_groups, ) # If the element isn't worth extracting, we always treat it as the first time we say it @@ -510,6 +520,13 @@ def _to_diagram_element( ret = EditablePartial.from_call(AnnotatedItem, label="LOOKAHEAD", item="") elif isinstance(element, pyparsing.PrecededBy): ret = EditablePartial.from_call(AnnotatedItem, label="LOOKBEHIND", item="") + elif isinstance(element, pyparsing.Group): + if show_groups: + ret = EditablePartial.from_call(AnnotatedItem, label="", item="") + else: + ret = EditablePartial.from_call(railroad.Group, label="", item="") + elif isinstance(element, pyparsing.TokenConverter): + ret = EditablePartial.from_call(AnnotatedItem, label=type(element).__name__.lower(), item="") elif isinstance(element, pyparsing.Opt): ret = EditablePartial.from_call(railroad.Optional, item="") elif isinstance(element, pyparsing.OneOrMore): @@ -558,6 +575,7 @@ def _to_diagram_element( vertical=vertical, index=i, show_results_names=show_results_names, + show_groups=show_groups, ) # Some elements don't need to be shown in the diagram diff --git a/pipenv/patched/notpip/_vendor/pyparsing/helpers.py b/pipenv/patched/notpip/_vendor/pyparsing/helpers.py index 5e7b3ad05e..be8a365788 100644 --- a/pipenv/patched/notpip/_vendor/pyparsing/helpers.py +++ b/pipenv/patched/notpip/_vendor/pyparsing/helpers.py @@ -185,7 +185,9 @@ def copy_token_to_repeater(s, l, t): def must_match_these_tokens(s, l, t): theseTokens = _flatten(t.as_list()) if theseTokens != matchTokens: - raise ParseException(s, l, "Expected {}, found{}".format(matchTokens, theseTokens)) + raise 
ParseException( + s, l, "Expected {}, found{}".format(matchTokens, theseTokens) + ) rep.set_parse_action(must_match_these_tokens, callDuringTry=True) @@ -310,7 +312,7 @@ def one_of( return ret - except sre_constants.error: + except re.error: warnings.warn( "Exception creating Regex for one_of, building MatchFirst", stacklevel=2 ) @@ -350,10 +352,10 @@ def dict_of(key: ParserElement, value: ParserElement) -> ParserElement: prints:: [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - - color: light blue - - posn: upper left - - shape: SQUARE - - texture: burlap + - color: 'light blue' + - posn: 'upper left' + - shape: 'SQUARE' + - texture: 'burlap' SQUARE SQUARE {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} @@ -758,10 +760,14 @@ def infix_notation( a tuple or list of functions, this is equivalent to calling ``set_parse_action(*fn)`` (:class:`ParserElement.set_parse_action`) - - ``lpar`` - expression for matching left-parentheses - (default= ``Suppress('(')``) - - ``rpar`` - expression for matching right-parentheses - (default= ``Suppress(')')``) + - ``lpar`` - expression for matching left-parentheses; if passed as a + str, then will be parsed as Suppress(lpar). If lpar is passed as + an expression (such as ``Literal('(')``), then it will be kept in + the parsed results, and grouped with them. (default= ``Suppress('(')``) + - ``rpar`` - expression for matching right-parentheses; if passed as a + str, then will be parsed as Suppress(rpar). If rpar is passed as + an expression (such as ``Literal(')')``), then it will be kept in + the parsed results, and grouped with them. (default= ``Suppress(')')``) Example:: @@ -803,9 +809,17 @@ def parseImpl(self, instring, loc, doActions=True): _FB.__name__ = "FollowedBy>" ret = Forward() - lpar = Suppress(lpar) - rpar = Suppress(rpar) - lastExpr = base_expr | (lpar + ret + rpar) + if isinstance(lpar, str): + lpar = Suppress(lpar) + if isinstance(rpar, str): + rpar = Suppress(rpar) + + # if lpar and rpar are not suppressed, wrap in group + if not (isinstance(rpar, Suppress) and isinstance(rpar, Suppress)): + lastExpr = base_expr | Group(lpar + ret + rpar) + else: + lastExpr = base_expr | (lpar + ret + rpar) + for i, operDef in enumerate(op_list): opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4] if isinstance(opExpr, str_type): diff --git a/pipenv/patched/notpip/_vendor/pyparsing/results.py b/pipenv/patched/notpip/_vendor/pyparsing/results.py index 9676f45b88..bb444df4e5 100644 --- a/pipenv/patched/notpip/_vendor/pyparsing/results.py +++ b/pipenv/patched/notpip/_vendor/pyparsing/results.py @@ -65,9 +65,9 @@ def test(s, fn=repr): 'month' in result -> True 'minutes' in result -> False result.dump() -> ['1999', '/', '12', '/', '31'] - - day: 31 - - month: 12 - - year: 1999 + - day: '31' + - month: '12' + - year: '1999' """ _null_values: Tuple[Any, ...] 
= (None, [], "", ()) @@ -301,7 +301,7 @@ def remove_LABEL(tokens): prints:: ['AAB', '123', '321'] - - LABEL: AAB + - LABEL: 'AAB' ['AAB', '123', '321'] """ @@ -603,15 +603,15 @@ def dump(self, indent="", full=True, include_list=True, _depth=0) -> str: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") - result = date_str.parse_string('12/31/1999') + result = date_str.parse_string('1999/12/31') print(result.dump()) prints:: - ['12', '/', '31', '/', '1999'] - - day: 1999 - - month: 31 - - year: 12 + ['1999', '/', '12', '/', '31'] + - day: '31' + - month: '12' + - year: '1999' """ out = [] NL = "\n" diff --git a/pipenv/patched/notpip/_vendor/requests/LICENSE b/pipenv/patched/notpip/_vendor/requests/LICENSE new file mode 100644 index 0000000000..67db858821 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/requests/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/pipenv/patched/notpip/_vendor/resolvelib/LICENSE b/pipenv/patched/notpip/_vendor/resolvelib/LICENSE new file mode 100644 index 0000000000..b9077766e9 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/resolvelib/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Tzu-ping Chung + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/rich/__init__.py b/pipenv/patched/notpip/_vendor/rich/__init__.py index 6e16653a76..d27db24981 100644 --- a/pipenv/patched/notpip/_vendor/rich/__init__.py +++ b/pipenv/patched/notpip/_vendor/rich/__init__.py @@ -1,9 +1,9 @@ """Rich text and beautiful formatting in the terminal.""" import os -from typing import Callable, IO, TYPE_CHECKING, Any, Optional +from typing import Callable, IO, TYPE_CHECKING, Any, Optional, Union -from ._extension import load_ipython_extension +from ._extension import load_ipython_extension # noqa: F401 __all__ = ["get_console", "reconfigure", "print", "inspect"] @@ -73,7 +73,7 @@ def print_json( json: Optional[str] = None, *, data: Any = None, - indent: int = 2, + indent: Union[None, int, str] = 2, highlight: bool = True, skip_keys: bool = False, ensure_ascii: bool = True, diff --git a/pipenv/patched/notpip/_vendor/rich/__main__.py b/pipenv/patched/notpip/_vendor/rich/__main__.py index ae9a2b5f17..ee41c25b87 100644 --- a/pipenv/patched/notpip/_vendor/rich/__main__.py +++ b/pipenv/patched/notpip/_vendor/rich/__main__.py @@ -51,7 +51,6 @@ def make_test_card() -> Table: pad_edge=False, ) color_table.add_row( - # "[bold yellow]256[/] colors or [bold green]16.7 million[/] colors [blue](if supported by your terminal)[/].", ( "✓ [bold green]4-bit color[/]\n" "✓ [bold blue]8-bit color[/]\n" @@ -226,10 +225,12 @@ def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: console.print(test_card) taken = round((process_time() - start) * 1000.0, 1) - text = console.file.getvalue() - # https://bugs.python.org/issue37871 - for line in text.splitlines(True): - print(line, end="") + c = Console(record=True) + c.print(test_card) + # c.save_svg( + # path="/Users/darrenburns/Library/Application Support/JetBrains/PyCharm2021.3/scratches/svg_export.svg", + # title="Rich can export to SVG", + # ) print(f"rendered in {pre_cache_taken}ms (cold cache)") print(f"rendered in {taken}ms (warm cache)") @@ -242,6 +243,10 @@ def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: sponsor_message.add_column(style="green", justify="right") sponsor_message.add_column(no_wrap=True) + sponsor_message.add_row( + "Textualize", + "[u blue link=https://github.com/textualize]https://github.com/textualize", + ) sponsor_message.add_row( "Buy devs a :coffee:", "[u blue link=https://ko-fi.com/textualize]https://ko-fi.com/textualize", @@ -250,15 +255,12 @@ def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: "Twitter", "[u blue 
link=https://twitter.com/willmcgugan]https://twitter.com/willmcgugan", ) - sponsor_message.add_row( - "Blog", "[u blue link=https://www.willmcgugan.com]https://www.willmcgugan.com" - ) intro_message = Text.from_markup( """\ We hope you enjoy using Rich! -Rich is maintained with :heart: by [link=https://www.textualize.io]Textualize.io[/] +Rich is maintained with [red]:heart:[/] by [link=https://www.textualize.io]Textualize.io[/] - Will McGugan""" ) diff --git a/pipenv/patched/notpip/_vendor/rich/_inspect.py b/pipenv/patched/notpip/_vendor/rich/_inspect.py index 262695b1c4..01713e5767 100644 --- a/pipenv/patched/notpip/_vendor/rich/_inspect.py +++ b/pipenv/patched/notpip/_vendor/rich/_inspect.py @@ -1,9 +1,10 @@ from __future__ import absolute_import +import inspect from inspect import cleandoc, getdoc, getfile, isclass, ismodule, signature from typing import Any, Iterable, Optional, Tuple -from .console import RenderableType, Group +from .console import Group, RenderableType from .highlighter import ReprHighlighter from .jupyter import JupyterMixin from .panel import Panel @@ -97,7 +98,8 @@ def _get_signature(self, name: str, obj: Any) -> Optional[Text]: source_filename: Optional[str] = None try: source_filename = getfile(obj) - except TypeError: + except (OSError, TypeError): + # OSError is raised if obj has no source file, e.g. when defined in REPL. pass callable_name = Text(name, style="inspect.callable") @@ -106,8 +108,17 @@ def _get_signature(self, name: str, obj: Any) -> Optional[Text]: signature_text = self.highlighter(_signature) qualname = name or getattr(obj, "__qualname__", name) + + # If obj is a module, there may be classes (which are callable) to display + if inspect.isclass(obj): + prefix = "class" + else: + prefix = "def" + qual_signature = Text.assemble( - ("def ", "inspect.def"), (qualname, "inspect.callable"), signature_text + (f"{prefix} ", f"inspect.{prefix}"), + (qualname, "inspect.callable"), + signature_text, ) return qual_signature @@ -204,7 +215,8 @@ def safe_getattr(attr_name: str) -> Tuple[Any, Any]: add_row(key_text, Pretty(value, highlighter=highlighter)) if items_table.row_count: yield items_table - else: + elif not_shown_count: yield Text.from_markup( - f"[b cyan]{not_shown_count}[/][i] attribute(s) not shown.[/i] Run [b][magenta]inspect[/]([not b]inspect[/])[/b] for options." + f"[b cyan]{not_shown_count}[/][i] attribute(s) not shown.[/i] " + f"Run [b][magenta]inspect[/]([not b]inspect[/])[/b] for options." ) diff --git a/pipenv/patched/notpip/_vendor/rich/_lru_cache.py b/pipenv/patched/notpip/_vendor/rich/_lru_cache.py index b7bf2ce1ad..c9a50f8283 100644 --- a/pipenv/patched/notpip/_vendor/rich/_lru_cache.py +++ b/pipenv/patched/notpip/_vendor/rich/_lru_cache.py @@ -1,12 +1,16 @@ -from collections import OrderedDict -from typing import Dict, Generic, TypeVar - +from typing import Dict, Generic, TypeVar, TYPE_CHECKING +import sys CacheKey = TypeVar("CacheKey") CacheValue = TypeVar("CacheValue") +if sys.version_info < (3, 9): + from pipenv.patched.notpip._vendor.typing_extensions import OrderedDict +else: + from collections import OrderedDict + -class LRUCache(Generic[CacheKey, CacheValue], OrderedDict): # type: ignore # https://github.com/python/mypy/issues/6904 +class LRUCache(OrderedDict[CacheKey, CacheValue]): """ A dictionary-like container that stores a given maximum items. 
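The import shim in the hunk above exists because collections.OrderedDict only supports subscripting from Python 3.9 (PEP 585); on older interpreters the generic alias comes from the vendored typing_extensions, which lets LRUCache subclass OrderedDict[CacheKey, CacheValue] directly instead of mixing in Generic with a mypy ignore. The same guard pattern in isolation, using the plain (non-vendored) typing_extensions package as an assumed dependency:

import sys

if sys.version_info < (3, 9):
    # collections.OrderedDict is not subscriptable before 3.9 (PEP 585)
    from typing_extensions import OrderedDict  # assumes typing_extensions is installed
else:
    from collections import OrderedDict

class Cache(OrderedDict[str, int]):
    """Subclassing the parameterized alias works on every supported version."""

c = Cache()
c["answer"] = 42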
@@ -17,18 +21,18 @@ class LRUCache(Generic[CacheKey, CacheValue], OrderedDict): # type: ignore # ht def __init__(self, cache_size: int) -> None: self.cache_size = cache_size - super(LRUCache, self).__init__() + super().__init__() def __setitem__(self, key: CacheKey, value: CacheValue) -> None: """Store a new views, potentially discarding an old value.""" if key not in self: if len(self) >= self.cache_size: self.popitem(last=False) - OrderedDict.__setitem__(self, key, value) + super().__setitem__(key, value) - def __getitem__(self: Dict[CacheKey, CacheValue], key: CacheKey) -> CacheValue: + def __getitem__(self, key: CacheKey) -> CacheValue: """Gets the item, but also makes it most recent.""" - value: CacheValue = OrderedDict.__getitem__(self, key) - OrderedDict.__delitem__(self, key) - OrderedDict.__setitem__(self, key, value) + value: CacheValue = super().__getitem__(key) + super().__delitem__(key) + super().__setitem__(key, value) return value diff --git a/pipenv/patched/notpip/_vendor/rich/_spinners.py b/pipenv/patched/notpip/_vendor/rich/_spinners.py index dc1db0777e..d0bb1fe751 100644 --- a/pipenv/patched/notpip/_vendor/rich/_spinners.py +++ b/pipenv/patched/notpip/_vendor/rich/_spinners.py @@ -22,149 +22,36 @@ SPINNERS = { "dots": { "interval": 80, - "frames": ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"], + "frames": "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏", }, - "dots2": {"interval": 80, "frames": ["⣾", "⣽", "⣻", "⢿", "⡿", "⣟", "⣯", "⣷"]}, + "dots2": {"interval": 80, "frames": "⣾⣽⣻⢿⡿⣟⣯⣷"}, "dots3": { "interval": 80, - "frames": ["⠋", "⠙", "⠚", "⠞", "⠖", "⠦", "⠴", "⠲", "⠳", "⠓"], + "frames": "⠋⠙⠚⠞⠖⠦⠴⠲⠳⠓", }, "dots4": { "interval": 80, - "frames": [ - "⠄", - "⠆", - "⠇", - "⠋", - "⠙", - "⠸", - "⠰", - "⠠", - "⠰", - "⠸", - "⠙", - "⠋", - "⠇", - "⠆", - ], + "frames": "⠄⠆⠇⠋⠙⠸⠰⠠⠰⠸⠙⠋⠇⠆", }, "dots5": { "interval": 80, - "frames": [ - "⠋", - "⠙", - "⠚", - "⠒", - "⠂", - "⠂", - "⠒", - "⠲", - "⠴", - "⠦", - "⠖", - "⠒", - "⠐", - "⠐", - "⠒", - "⠓", - "⠋", - ], + "frames": "⠋⠙⠚⠒⠂⠂⠒⠲⠴⠦⠖⠒⠐⠐⠒⠓⠋", }, "dots6": { "interval": 80, - "frames": [ - "⠁", - "⠉", - "⠙", - "⠚", - "⠒", - "⠂", - "⠂", - "⠒", - "⠲", - "⠴", - "⠤", - "⠄", - "⠄", - "⠤", - "⠴", - "⠲", - "⠒", - "⠂", - "⠂", - "⠒", - "⠚", - "⠙", - "⠉", - "⠁", - ], + "frames": "⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠴⠲⠒⠂⠂⠒⠚⠙⠉⠁", }, "dots7": { "interval": 80, - "frames": [ - "⠈", - "⠉", - "⠋", - "⠓", - "⠒", - "⠐", - "⠐", - "⠒", - "⠖", - "⠦", - "⠤", - "⠠", - "⠠", - "⠤", - "⠦", - "⠖", - "⠒", - "⠐", - "⠐", - "⠒", - "⠓", - "⠋", - "⠉", - "⠈", - ], + "frames": "⠈⠉⠋⠓⠒⠐⠐⠒⠖⠦⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈", }, "dots8": { "interval": 80, - "frames": [ - "⠁", - "⠁", - "⠉", - "⠙", - "⠚", - "⠒", - "⠂", - "⠂", - "⠒", - "⠲", - "⠴", - "⠤", - "⠄", - "⠄", - "⠤", - "⠠", - "⠠", - "⠤", - "⠦", - "⠖", - "⠒", - "⠐", - "⠐", - "⠒", - "⠓", - "⠋", - "⠉", - "⠈", - "⠈", - ], + "frames": "⠁⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈⠈", }, - "dots9": {"interval": 80, "frames": ["⢹", "⢺", "⢼", "⣸", "⣇", "⡧", "⡗", "⡏"]}, - "dots10": {"interval": 80, "frames": ["⢄", "⢂", "⢁", "⡁", "⡈", "⡐", "⡠"]}, - "dots11": {"interval": 100, "frames": ["⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈"]}, + "dots9": {"interval": 80, "frames": "⢹⢺⢼⣸⣇⡧⡗⡏"}, + "dots10": {"interval": 80, "frames": "⢄⢂⢁⡁⡈⡐⡠"}, + "dots11": {"interval": 100, "frames": "⠁⠂⠄⡀⢀⠠⠐⠈"}, "dots12": { "interval": 80, "frames": [ @@ -228,315 +115,62 @@ }, "dots8Bit": { "interval": 80, - "frames": [ - "⠀", - "⠁", - "⠂", - "⠃", - "⠄", - "⠅", - "⠆", - "⠇", - "⡀", - "⡁", - "⡂", - "⡃", - "⡄", - "⡅", - "⡆", - "⡇", - "⠈", - "⠉", - "⠊", - "⠋", - "⠌", - "⠍", - "⠎", - "⠏", - "⡈", - "⡉", - "⡊", - "⡋", - "⡌", - "⡍", - "⡎", - "⡏", - "⠐", - "⠑", - 
"⠒", - "⠓", - "⠔", - "⠕", - "⠖", - "⠗", - "⡐", - "⡑", - "⡒", - "⡓", - "⡔", - "⡕", - "⡖", - "⡗", - "⠘", - "⠙", - "⠚", - "⠛", - "⠜", - "⠝", - "⠞", - "⠟", - "⡘", - "⡙", - "⡚", - "⡛", - "⡜", - "⡝", - "⡞", - "⡟", - "⠠", - "⠡", - "⠢", - "⠣", - "⠤", - "⠥", - "⠦", - "⠧", - "⡠", - "⡡", - "⡢", - "⡣", - "⡤", - "⡥", - "⡦", - "⡧", - "⠨", - "⠩", - "⠪", - "⠫", - "⠬", - "⠭", - "⠮", - "⠯", - "⡨", - "⡩", - "⡪", - "⡫", - "⡬", - "⡭", - "⡮", - "⡯", - "⠰", - "⠱", - "⠲", - "⠳", - "⠴", - "⠵", - "⠶", - "⠷", - "⡰", - "⡱", - "⡲", - "⡳", - "⡴", - "⡵", - "⡶", - "⡷", - "⠸", - "⠹", - "⠺", - "⠻", - "⠼", - "⠽", - "⠾", - "⠿", - "⡸", - "⡹", - "⡺", - "⡻", - "⡼", - "⡽", - "⡾", - "⡿", - "⢀", - "⢁", - "⢂", - "⢃", - "⢄", - "⢅", - "⢆", - "⢇", - "⣀", - "⣁", - "⣂", - "⣃", - "⣄", - "⣅", - "⣆", - "⣇", - "⢈", - "⢉", - "⢊", - "⢋", - "⢌", - "⢍", - "⢎", - "⢏", - "⣈", - "⣉", - "⣊", - "⣋", - "⣌", - "⣍", - "⣎", - "⣏", - "⢐", - "⢑", - "⢒", - "⢓", - "⢔", - "⢕", - "⢖", - "⢗", - "⣐", - "⣑", - "⣒", - "⣓", - "⣔", - "⣕", - "⣖", - "⣗", - "⢘", - "⢙", - "⢚", - "⢛", - "⢜", - "⢝", - "⢞", - "⢟", - "⣘", - "⣙", - "⣚", - "⣛", - "⣜", - "⣝", - "⣞", - "⣟", - "⢠", - "⢡", - "⢢", - "⢣", - "⢤", - "⢥", - "⢦", - "⢧", - "⣠", - "⣡", - "⣢", - "⣣", - "⣤", - "⣥", - "⣦", - "⣧", - "⢨", - "⢩", - "⢪", - "⢫", - "⢬", - "⢭", - "⢮", - "⢯", - "⣨", - "⣩", - "⣪", - "⣫", - "⣬", - "⣭", - "⣮", - "⣯", - "⢰", - "⢱", - "⢲", - "⢳", - "⢴", - "⢵", - "⢶", - "⢷", - "⣰", - "⣱", - "⣲", - "⣳", - "⣴", - "⣵", - "⣶", - "⣷", - "⢸", - "⢹", - "⢺", - "⢻", - "⢼", - "⢽", - "⢾", - "⢿", - "⣸", - "⣹", - "⣺", - "⣻", - "⣼", - "⣽", - "⣾", - "⣿", - ], + "frames": "⠀⠁⠂⠃⠄⠅⠆⠇⡀⡁⡂⡃⡄⡅⡆⡇⠈⠉⠊⠋⠌⠍⠎⠏⡈⡉⡊⡋⡌⡍⡎⡏⠐⠑⠒⠓⠔⠕⠖⠗⡐⡑⡒⡓⡔⡕⡖⡗⠘⠙⠚⠛⠜⠝⠞⠟⡘⡙" + "⡚⡛⡜⡝⡞⡟⠠⠡⠢⠣⠤⠥⠦⠧⡠⡡⡢⡣⡤⡥⡦⡧⠨⠩⠪⠫⠬⠭⠮⠯⡨⡩⡪⡫⡬⡭⡮⡯⠰⠱⠲⠳⠴⠵⠶⠷⡰⡱⡲⡳⡴⡵⡶⡷⠸⠹⠺⠻" + "⠼⠽⠾⠿⡸⡹⡺⡻⡼⡽⡾⡿⢀⢁⢂⢃⢄⢅⢆⢇⣀⣁⣂⣃⣄⣅⣆⣇⢈⢉⢊⢋⢌⢍⢎⢏⣈⣉⣊⣋⣌⣍⣎⣏⢐⢑⢒⢓⢔⢕⢖⢗⣐⣑⣒⣓⣔⣕" + "⣖⣗⢘⢙⢚⢛⢜⢝⢞⢟⣘⣙⣚⣛⣜⣝⣞⣟⢠⢡⢢⢣⢤⢥⢦⢧⣠⣡⣢⣣⣤⣥⣦⣧⢨⢩⢪⢫⢬⢭⢮⢯⣨⣩⣪⣫⣬⣭⣮⣯⢰⢱⢲⢳⢴⢵⢶⢷" + "⣰⣱⣲⣳⣴⣵⣶⣷⢸⢹⢺⢻⢼⢽⢾⢿⣸⣹⣺⣻⣼⣽⣾⣿", }, "line": {"interval": 130, "frames": ["-", "\\", "|", "/"]}, - "line2": {"interval": 100, "frames": ["⠂", "-", "–", "—", "–", "-"]}, - "pipe": {"interval": 100, "frames": ["┤", "┘", "┴", "└", "├", "┌", "┬", "┐"]}, + "line2": {"interval": 100, "frames": "⠂-–—–-"}, + "pipe": {"interval": 100, "frames": "┤┘┴└├┌┬┐"}, "simpleDots": {"interval": 400, "frames": [". ", ".. ", "...", " "]}, "simpleDotsScrolling": { "interval": 200, "frames": [". ", ".. 
", "...", " ..", " .", " "], }, - "star": {"interval": 70, "frames": ["✶", "✸", "✹", "✺", "✹", "✷"]}, - "star2": {"interval": 80, "frames": ["+", "x", "*"]}, + "star": {"interval": 70, "frames": "✶✸✹✺✹✷"}, + "star2": {"interval": 80, "frames": "+x*"}, "flip": { "interval": 70, - "frames": ["_", "_", "_", "-", "`", "`", "'", "´", "-", "_", "_", "_"], + "frames": "___-``'´-___", }, - "hamburger": {"interval": 100, "frames": ["☱", "☲", "☴"]}, + "hamburger": {"interval": 100, "frames": "☱☲☴"}, "growVertical": { "interval": 120, - "frames": ["▁", "▃", "▄", "▅", "▆", "▇", "▆", "▅", "▄", "▃"], + "frames": "▁▃▄▅▆▇▆▅▄▃", }, "growHorizontal": { "interval": 120, - "frames": ["▏", "▎", "▍", "▌", "▋", "▊", "▉", "▊", "▋", "▌", "▍", "▎"], + "frames": "▏▎▍▌▋▊▉▊▋▌▍▎", }, - "balloon": {"interval": 140, "frames": [" ", ".", "o", "O", "@", "*", " "]}, - "balloon2": {"interval": 120, "frames": [".", "o", "O", "°", "O", "o", "."]}, - "noise": {"interval": 100, "frames": ["▓", "▒", "░"]}, - "bounce": {"interval": 120, "frames": ["⠁", "⠂", "⠄", "⠂"]}, - "boxBounce": {"interval": 120, "frames": ["▖", "▘", "▝", "▗"]}, - "boxBounce2": {"interval": 100, "frames": ["▌", "▀", "▐", "▄"]}, - "triangle": {"interval": 50, "frames": ["◢", "◣", "◤", "◥"]}, - "arc": {"interval": 100, "frames": ["◜", "◠", "◝", "◞", "◡", "◟"]}, - "circle": {"interval": 120, "frames": ["◡", "⊙", "◠"]}, - "squareCorners": {"interval": 180, "frames": ["◰", "◳", "◲", "◱"]}, - "circleQuarters": {"interval": 120, "frames": ["◴", "◷", "◶", "◵"]}, - "circleHalves": {"interval": 50, "frames": ["◐", "◓", "◑", "◒"]}, - "squish": {"interval": 100, "frames": ["╫", "╪"]}, - "toggle": {"interval": 250, "frames": ["⊶", "⊷"]}, - "toggle2": {"interval": 80, "frames": ["▫", "▪"]}, - "toggle3": {"interval": 120, "frames": ["□", "■"]}, - "toggle4": {"interval": 100, "frames": ["■", "□", "▪", "▫"]}, - "toggle5": {"interval": 100, "frames": ["▮", "▯"]}, - "toggle6": {"interval": 300, "frames": ["ဝ", "၀"]}, - "toggle7": {"interval": 80, "frames": ["⦾", "⦿"]}, - "toggle8": {"interval": 100, "frames": ["◍", "◌"]}, - "toggle9": {"interval": 100, "frames": ["◉", "◎"]}, - "toggle10": {"interval": 100, "frames": ["㊂", "㊀", "㊁"]}, - "toggle11": {"interval": 50, "frames": ["⧇", "⧆"]}, - "toggle12": {"interval": 120, "frames": ["☗", "☖"]}, - "toggle13": {"interval": 80, "frames": ["=", "*", "-"]}, - "arrow": {"interval": 100, "frames": ["←", "↖", "↑", "↗", "→", "↘", "↓", "↙"]}, + "balloon": {"interval": 140, "frames": " .oO@* "}, + "balloon2": {"interval": 120, "frames": ".oO°Oo."}, + "noise": {"interval": 100, "frames": "▓▒░"}, + "bounce": {"interval": 120, "frames": "⠁⠂⠄⠂"}, + "boxBounce": {"interval": 120, "frames": "▖▘▝▗"}, + "boxBounce2": {"interval": 100, "frames": "▌▀▐▄"}, + "triangle": {"interval": 50, "frames": "◢◣◤◥"}, + "arc": {"interval": 100, "frames": "◜◠◝◞◡◟"}, + "circle": {"interval": 120, "frames": "◡⊙◠"}, + "squareCorners": {"interval": 180, "frames": "◰◳◲◱"}, + "circleQuarters": {"interval": 120, "frames": "◴◷◶◵"}, + "circleHalves": {"interval": 50, "frames": "◐◓◑◒"}, + "squish": {"interval": 100, "frames": "╫╪"}, + "toggle": {"interval": 250, "frames": "⊶⊷"}, + "toggle2": {"interval": 80, "frames": "▫▪"}, + "toggle3": {"interval": 120, "frames": "□■"}, + "toggle4": {"interval": 100, "frames": "■□▪▫"}, + "toggle5": {"interval": 100, "frames": "▮▯"}, + "toggle6": {"interval": 300, "frames": "ဝ၀"}, + "toggle7": {"interval": 80, "frames": "⦾⦿"}, + "toggle8": {"interval": 100, "frames": "◍◌"}, + "toggle9": {"interval": 100, "frames": "◉◎"}, + "toggle10": 
{"interval": 100, "frames": "㊂㊀㊁"}, + "toggle11": {"interval": 50, "frames": "⧇⧆"}, + "toggle12": {"interval": 120, "frames": "☗☖"}, + "toggle13": {"interval": 80, "frames": "=*-"}, + "arrow": {"interval": 100, "frames": "←↖↑↗→↘↓↙"}, "arrow2": { "interval": 80, "frames": ["⬆️ ", "↗️ ", "➡️ ", "↘️ ", "⬇️ ", "↙️ ", "⬅️ ", "↖️ "], @@ -769,7 +403,7 @@ "▐/|____________▌", ], }, - "dqpb": {"interval": 100, "frames": ["d", "q", "p", "b"]}, + "dqpb": {"interval": 100, "frames": "dqpb"}, "weather": { "interval": 100, "frames": [ @@ -798,7 +432,7 @@ "☀️ ", ], }, - "christmas": {"interval": 400, "frames": ["🌲", "🎄"]}, + "christmas": {"interval": 400, "frames": "🌲🎄"}, "grenade": { "interval": 80, "frames": [ @@ -819,7 +453,7 @@ ], }, "point": {"interval": 125, "frames": ["∙∙∙", "●∙∙", "∙●∙", "∙∙●", "∙∙∙"]}, - "layer": {"interval": 150, "frames": ["-", "=", "≡"]}, + "layer": {"interval": 150, "frames": "-=≡"}, "betaWave": { "interval": 80, "frames": [ diff --git a/pipenv/patched/notpip/_vendor/rich/_win32_console.py b/pipenv/patched/notpip/_vendor/rich/_win32_console.py new file mode 100644 index 0000000000..f291b689ec --- /dev/null +++ b/pipenv/patched/notpip/_vendor/rich/_win32_console.py @@ -0,0 +1,630 @@ +"""Light wrapper around the Win32 Console API - this module should only be imported on Windows + +The API that this module wraps is documented at https://docs.microsoft.com/en-us/windows/console/console-functions +""" +import ctypes +import sys +from typing import Any + +windll: Any = None +if sys.platform == "win32": + windll = ctypes.LibraryLoader(ctypes.WinDLL) +else: + raise ImportError(f"{__name__} can only be imported on Windows") + +import time +from ctypes import Structure, byref, wintypes +from typing import IO, NamedTuple, Type, cast + +from pipenv.patched.notpip._vendor.rich.color import ColorSystem +from pipenv.patched.notpip._vendor.rich.style import Style + +STDOUT = -11 +ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4 + +COORD = wintypes._COORD + + +class LegacyWindowsError(Exception): + pass + + +class WindowsCoordinates(NamedTuple): + """Coordinates in the Windows Console API are (y, x), not (x, y). + This class is intended to prevent that confusion. + Rows and columns are indexed from 0. + This class can be used in place of wintypes._COORD in arguments and argtypes. + """ + + row: int + col: int + + @classmethod + def from_param(cls, value: "WindowsCoordinates") -> COORD: + """Converts a WindowsCoordinates into a wintypes _COORD structure. + This classmethod is internally called by ctypes to perform the conversion. + + Args: + value (WindowsCoordinates): The input coordinates to convert. + + Returns: + wintypes._COORD: The converted coordinates struct. + """ + return COORD(value.col, value.row) + + +class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + + +class CONSOLE_CURSOR_INFO(ctypes.Structure): + _fields_ = [("dwSize", wintypes.DWORD), ("bVisible", wintypes.BOOL)] + + +_GetStdHandle = windll.kernel32.GetStdHandle +_GetStdHandle.argtypes = [ + wintypes.DWORD, +] +_GetStdHandle.restype = wintypes.HANDLE + + +def GetStdHandle(handle: int = STDOUT) -> wintypes.HANDLE: + """Retrieves a handle to the specified standard device (standard input, standard output, or standard error). + + Args: + handle (int): Integer identifier for the handle. Defaults to -11 (stdout). 
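WindowsCoordinates orders its fields (row, col) to sidestep the Console API's (y, x) confusion, and its from_param classmethod lets ctypes accept the NamedTuple anywhere an argtype is declared as WindowsCoordinates, converting back to the API's COORD(X=col, Y=row) on the way in. The swap in miniature, using the names defined in this module (Windows-only):

# ctypes invokes from_param() automatically when converting arguments whose
# argtype is declared as WindowsCoordinates (cast to the COORD struct type).
coords = WindowsCoordinates(row=5, col=20)
c_coord = WindowsCoordinates.from_param(coords)
assert (c_coord.X, c_coord.Y) == (20, 5)   # COORD stores (X=col, Y=row)

handle = GetStdHandle()   # STDOUT == -11; feeds the Fill/Set wrappers defined below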
+ + Returns: + wintypes.HANDLE: The handle + """ + return cast(wintypes.HANDLE, _GetStdHandle(handle)) + + +_GetConsoleMode = windll.kernel32.GetConsoleMode +_GetConsoleMode.argtypes = [wintypes.HANDLE, wintypes.LPDWORD] +_GetConsoleMode.restype = wintypes.BOOL + + +def GetConsoleMode(std_handle: wintypes.HANDLE) -> int: + """Retrieves the current input mode of a console's input buffer + or the current output mode of a console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + + Raises: + LegacyWindowsError: If any error occurs while calling the Windows console API. + + Returns: + int: Value representing the current console mode as documented at + https://docs.microsoft.com/en-us/windows/console/getconsolemode#parameters + """ + + console_mode = wintypes.DWORD() + success = bool(_GetConsoleMode(std_handle, console_mode)) + if not success: + raise LegacyWindowsError("Unable to get legacy Windows Console Mode") + return console_mode.value + + +_FillConsoleOutputCharacterW = windll.kernel32.FillConsoleOutputCharacterW +_FillConsoleOutputCharacterW.argtypes = [ + wintypes.HANDLE, + ctypes.c_char, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputCharacterW.restype = wintypes.BOOL + + +def FillConsoleOutputCharacter( + std_handle: wintypes.HANDLE, + char: str, + length: int, + start: WindowsCoordinates, +) -> int: + """Writes a character to the console screen buffer a specified number of times, beginning at the specified coordinates. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + char (str): The character to write. Must be a string of length 1. + length (int): The number of times to write the character. + start (WindowsCoordinates): The coordinates to start writing at. + + Returns: + int: The number of characters written. + """ + character = ctypes.c_char(char.encode()) + num_characters = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + _FillConsoleOutputCharacterW( + std_handle, + character, + num_characters, + start, + byref(num_written), + ) + return num_written.value + + +_FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute +_FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + cast(Type[COORD], WindowsCoordinates), + ctypes.POINTER(wintypes.DWORD), +] +_FillConsoleOutputAttribute.restype = wintypes.BOOL + + +def FillConsoleOutputAttribute( + std_handle: wintypes.HANDLE, + attributes: int, + length: int, + start: WindowsCoordinates, +) -> int: + """Sets the character attributes for a specified number of character cells, + beginning at the specified coordinates in a screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours of the cells. + length (int): The number of cells to set the output attribute of. + start (WindowsCoordinates): The coordinates of the first cell whose attributes are to be set. + + Returns: + int: The number of cells whose attributes were actually set. 
+ """ + num_cells = wintypes.DWORD(length) + style_attrs = wintypes.WORD(attributes) + num_written = wintypes.DWORD(0) + _FillConsoleOutputAttribute( + std_handle, style_attrs, num_cells, start, byref(num_written) + ) + return num_written.value + + +_SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute +_SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, +] +_SetConsoleTextAttribute.restype = wintypes.BOOL + + +def SetConsoleTextAttribute( + std_handle: wintypes.HANDLE, attributes: wintypes.WORD +) -> bool: + """Set the colour attributes for all text written after this function is called. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + attributes (int): Integer value representing the foreground and background colours. + + + Returns: + bool: True if the attribute was set successfully, otherwise False. + """ + return bool(_SetConsoleTextAttribute(std_handle, attributes)) + + +_GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo +_GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO), +] +_GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + +def GetConsoleScreenBufferInfo( + std_handle: wintypes.HANDLE, +) -> CONSOLE_SCREEN_BUFFER_INFO: + """Retrieves information about the specified console screen buffer. + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + + Returns: + CONSOLE_SCREEN_BUFFER_INFO: A CONSOLE_SCREEN_BUFFER_INFO ctype struct contain information about + screen size, cursor position, colour attributes, and more.""" + console_screen_buffer_info = CONSOLE_SCREEN_BUFFER_INFO() + _GetConsoleScreenBufferInfo(std_handle, byref(console_screen_buffer_info)) + return console_screen_buffer_info + + +_SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition +_SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + cast(Type[COORD], WindowsCoordinates), +] +_SetConsoleCursorPosition.restype = wintypes.BOOL + + +def SetConsoleCursorPosition( + std_handle: wintypes.HANDLE, coords: WindowsCoordinates +) -> bool: + """Set the position of the cursor in the console screen + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + coords (WindowsCoordinates): The coordinates to move the cursor to. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleCursorPosition(std_handle, coords)) + + +_SetConsoleCursorInfo = windll.kernel32.SetConsoleCursorInfo +_SetConsoleCursorInfo.argtypes = [ + wintypes.HANDLE, + ctypes.POINTER(CONSOLE_CURSOR_INFO), +] +_SetConsoleCursorInfo.restype = wintypes.BOOL + + +def SetConsoleCursorInfo( + std_handle: wintypes.HANDLE, cursor_info: CONSOLE_CURSOR_INFO +) -> bool: + """Set the cursor info - used for adjusting cursor visibility and width + + Args: + std_handle (wintypes.HANDLE): A handle to the console input buffer or the console screen buffer. + cursor_info (CONSOLE_CURSOR_INFO): CONSOLE_CURSOR_INFO ctype struct containing the new cursor info. + + Returns: + bool: True if the function succeeds, otherwise False. 
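+
+    Example (sketch; this is how hide_cursor() below drives the wrapper):
+
+        handle = GetStdHandle()
+        # dwSize is the cell fill percentage (1-100); bVisible toggles visibility.
+        SetConsoleCursorInfo(handle, cursor_info=CONSOLE_CURSOR_INFO(dwSize=100, bVisible=0))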
+ """ + return bool(_SetConsoleCursorInfo(std_handle, byref(cursor_info))) + + +_SetConsoleTitle = windll.kernel32.SetConsoleTitleW +_SetConsoleTitle.argtypes = [wintypes.LPCWSTR] +_SetConsoleTitle.restype = wintypes.BOOL + + +def SetConsoleTitle(title: str) -> bool: + """Sets the title of the current console window + + Args: + title (str): The new title of the console window. + + Returns: + bool: True if the function succeeds, otherwise False. + """ + return bool(_SetConsoleTitle(title)) + + +class LegacyWindowsTerm: + """This class allows interaction with the legacy Windows Console API. It should only be used in the context + of environments where virtual terminal processing is not available. However, if it is used in a Windows environment, + the entire API should work. + + Args: + file (IO[str]): The file which the Windows Console API HANDLE is retrieved from, defaults to sys.stdout. + """ + + BRIGHT_BIT = 8 + + # Indices are ANSI color numbers, values are the corresponding Windows Console API color numbers + ANSI_TO_WINDOWS = [ + 0, # black The Windows colours are defined in wincon.h as follows: + 4, # red define FOREGROUND_BLUE 0x0001 -- 0000 0001 + 2, # green define FOREGROUND_GREEN 0x0002 -- 0000 0010 + 6, # yellow define FOREGROUND_RED 0x0004 -- 0000 0100 + 1, # blue define FOREGROUND_INTENSITY 0x0008 -- 0000 1000 + 5, # magenta define BACKGROUND_BLUE 0x0010 -- 0001 0000 + 3, # cyan define BACKGROUND_GREEN 0x0020 -- 0010 0000 + 7, # white define BACKGROUND_RED 0x0040 -- 0100 0000 + 8, # bright black (grey) define BACKGROUND_INTENSITY 0x0080 -- 1000 0000 + 12, # bright red + 10, # bright green + 14, # bright yellow + 9, # bright blue + 13, # bright magenta + 11, # bright cyan + 15, # bright white + ] + + def __init__(self, file: "IO[str]") -> None: + handle = GetStdHandle(STDOUT) + self._handle = handle + default_text = GetConsoleScreenBufferInfo(handle).wAttributes + self._default_text = default_text + + self._default_fore = default_text & 7 + self._default_back = (default_text >> 4) & 7 + self._default_attrs = self._default_fore | (self._default_back << 4) + + self._file = file + self.write = file.write + self.flush = file.flush + + @property + def cursor_position(self) -> WindowsCoordinates: + """Returns the current position of the cursor (0-based) + + Returns: + WindowsCoordinates: The current cursor position. + """ + coord: COORD = GetConsoleScreenBufferInfo(self._handle).dwCursorPosition + return WindowsCoordinates(row=cast(int, coord.Y), col=cast(int, coord.X)) + + @property + def screen_size(self) -> WindowsCoordinates: + """Returns the current size of the console screen buffer, in character columns and rows + + Returns: + WindowsCoordinates: The width and height of the screen as WindowsCoordinates. + """ + screen_size: COORD = GetConsoleScreenBufferInfo(self._handle).dwSize + return WindowsCoordinates( + row=cast(int, screen_size.Y), col=cast(int, screen_size.X) + ) + + def write_text(self, text: str) -> None: + """Write text directly to the terminal without any modification of styles + + Args: + text (str): The text to write to the console + """ + self.write(text) + self.flush() + + def write_styled(self, text: str, style: Style) -> None: + """Write styled text to the terminal. 
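+
+        For example (an illustrative call; the Style fields are translated to
+        a Windows attribute word via ANSI_TO_WINDOWS above):
+
+            term.write_styled("error!", Style(color="red", bold=True))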
+ + Args: + text (str): The text to write + style (Style): The style of the text + """ + color = style.color + bgcolor = style.bgcolor + if style.reverse: + color, bgcolor = bgcolor, color + + if color: + fore = color.downgrade(ColorSystem.WINDOWS).number + fore = fore if fore is not None else 7 # Default to ANSI 7: White + if style.bold: + fore = fore | self.BRIGHT_BIT + if style.dim: + fore = fore & ~self.BRIGHT_BIT + fore = self.ANSI_TO_WINDOWS[fore] + else: + fore = self._default_fore + + if bgcolor: + back = bgcolor.downgrade(ColorSystem.WINDOWS).number + back = back if back is not None else 0 # Default to ANSI 0: Black + back = self.ANSI_TO_WINDOWS[back] + else: + back = self._default_back + + assert fore is not None + assert back is not None + + SetConsoleTextAttribute( + self._handle, attributes=ctypes.c_ushort(fore | (back << 4)) + ) + self.write_text(text) + SetConsoleTextAttribute(self._handle, attributes=self._default_text) + + def move_cursor_to(self, new_position: WindowsCoordinates) -> None: + """Set the position of the cursor + + Args: + new_position (WindowsCoordinates): The WindowsCoordinates representing the new position of the cursor. + """ + if new_position.col < 0 or new_position.row < 0: + return + SetConsoleCursorPosition(self._handle, coords=new_position) + + def erase_line(self) -> None: + """Erase all content on the line the cursor is currently located at""" + screen_size = self.screen_size + cursor_position = self.cursor_position + cells_to_erase = screen_size.col + start_coordinates = WindowsCoordinates(row=cursor_position.row, col=0) + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=start_coordinates + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=start_coordinates, + ) + + def erase_end_of_line(self) -> None: + """Erase all content from the cursor position to the end of that line""" + cursor_position = self.cursor_position + cells_to_erase = self.screen_size.col - cursor_position.col + FillConsoleOutputCharacter( + self._handle, " ", length=cells_to_erase, start=cursor_position + ) + FillConsoleOutputAttribute( + self._handle, + self._default_attrs, + length=cells_to_erase, + start=cursor_position, + ) + + def erase_start_of_line(self) -> None: + """Erase all content from the cursor position to the start of that line""" + row, col = self.cursor_position + start = WindowsCoordinates(row, 0) + FillConsoleOutputCharacter(self._handle, " ", length=col, start=start) + FillConsoleOutputAttribute( + self._handle, self._default_attrs, length=col, start=start + ) + + def move_cursor_up(self) -> None: + """Move the cursor up a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row - 1, col=cursor_position.col + ), + ) + + def move_cursor_down(self) -> None: + """Move the cursor down a single cell""" + cursor_position = self.cursor_position + SetConsoleCursorPosition( + self._handle, + coords=WindowsCoordinates( + row=cursor_position.row + 1, + col=cursor_position.col, + ), + ) + + def move_cursor_forward(self) -> None: + """Move the cursor forward a single cell. 
Wrap to the next line if required.""" + row, col = self.cursor_position + if col == self.screen_size.col - 1: + row += 1 + col = 0 + else: + col += 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def move_cursor_to_column(self, column: int) -> None: + """Move cursor to the column specified by the zero-based column index, staying on the same row + + Args: + column (int): The zero-based column index to move the cursor to. + """ + row, _ = self.cursor_position + SetConsoleCursorPosition(self._handle, coords=WindowsCoordinates(row, column)) + + def move_cursor_backward(self) -> None: + """Move the cursor backward a single cell. Wrap to the previous line if required.""" + row, col = self.cursor_position + if col == 0: + row -= 1 + col = self.screen_size.col - 1 + else: + col -= 1 + SetConsoleCursorPosition( + self._handle, coords=WindowsCoordinates(row=row, col=col) + ) + + def hide_cursor(self) -> None: + """Hide the cursor""" + invisible_cursor = CONSOLE_CURSOR_INFO(dwSize=100, bVisible=0) + SetConsoleCursorInfo(self._handle, cursor_info=invisible_cursor) + + def show_cursor(self) -> None: + """Show the cursor""" + visible_cursor = CONSOLE_CURSOR_INFO(dwSize=100, bVisible=1) + SetConsoleCursorInfo(self._handle, cursor_info=visible_cursor) + + def set_title(self, title: str) -> None: + """Set the title of the terminal window + + Args: + title (str): The new title of the console window + """ + assert len(title) < 255, "Console title must be less than 255 characters" + SetConsoleTitle(title) + + +if __name__ == "__main__": + handle = GetStdHandle() + + from pipenv.patched.notpip._vendor.rich.console import Console + + console = Console() + + term = LegacyWindowsTerm(sys.stdout) + term.set_title("Win32 Console Examples") + + style = Style(color="black", bgcolor="red") + + heading = Style.parse("black on green") + + # Check colour output + console.rule("Checking colour output") + console.print("[on red]on red!") + console.print("[blue]blue!") + console.print("[yellow]yellow!") + console.print("[bold yellow]bold yellow!") + console.print("[bright_yellow]bright_yellow!") + console.print("[dim bright_yellow]dim bright_yellow!") + console.print("[italic cyan]italic cyan!") + console.print("[bold white on blue]bold white on blue!") + console.print("[reverse bold white on blue]reverse bold white on blue!") + console.print("[bold black on cyan]bold black on cyan!") + console.print("[black on green]black on green!") + console.print("[blue on green]blue on green!") + console.print("[white on black]white on black!") + console.print("[black on white]black on white!") + console.print("[#1BB152 on #DA812D]#1BB152 on #DA812D!") + + # Check cursor movement + console.rule("Checking cursor movement") + console.print() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("went back and wrapped to prev line") + time.sleep(1) + term.move_cursor_up() + term.write_text("we go up") + time.sleep(1) + term.move_cursor_down() + term.write_text("and down") + time.sleep(1) + term.move_cursor_up() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went up and back 2") + time.sleep(1) + term.move_cursor_down() + term.move_cursor_backward() + term.move_cursor_backward() + term.write_text("we went down and back 2") + time.sleep(1) + + # Check erasing of lines + term.hide_cursor() + console.print() + console.rule("Checking line erasing") + console.print("\n...Deleting to the start of the line...") + term.write_text("The red 
arrow shows the cursor location, and direction of erase") + time.sleep(1) + term.move_cursor_to_column(16) + term.write_styled("<", Style.parse("black on red")) + term.move_cursor_backward() + time.sleep(1) + term.erase_start_of_line() + time.sleep(1) + + console.print("\n\n...And to the end of the line...") + term.write_text("The red arrow shows the cursor location, and direction of erase") + time.sleep(1) + + term.move_cursor_to_column(16) + term.write_styled(">", Style.parse("black on red")) + time.sleep(1) + term.erase_end_of_line() + time.sleep(1) + + console.print("\n\n...Now the whole line will be erased...") + term.write_styled("I'm going to disappear!", style=Style.parse("black on cyan")) + time.sleep(1) + term.erase_line() + + term.show_cursor() + print("\n") diff --git a/pipenv/patched/notpip/_vendor/rich/_windows.py b/pipenv/patched/notpip/_vendor/rich/_windows.py index 83d88b99b2..9b2c572a49 100644 --- a/pipenv/patched/notpip/_vendor/rich/_windows.py +++ b/pipenv/patched/notpip/_vendor/rich/_windows.py @@ -14,13 +14,21 @@ class WindowsConsoleFeatures: try: import ctypes - from ctypes import LibraryLoader, wintypes + from ctypes import LibraryLoader if sys.platform == "win32": windll = LibraryLoader(ctypes.WinDLL) else: windll = None raise ImportError("Not windows") + + from pipenv.patched.notpip._vendor.rich._win32_console import ( + ENABLE_VIRTUAL_TERMINAL_PROCESSING, + GetConsoleMode, + GetStdHandle, + LegacyWindowsError, + ) + except (AttributeError, ImportError, ValueError): # Fallback if we can't load the Windows DLL @@ -30,28 +38,20 @@ def get_windows_console_features() -> WindowsConsoleFeatures: else: - STDOUT = -11 - ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4 - _GetConsoleMode = windll.kernel32.GetConsoleMode - _GetConsoleMode.argtypes = [wintypes.HANDLE, wintypes.LPDWORD] - _GetConsoleMode.restype = wintypes.BOOL - - _GetStdHandle = windll.kernel32.GetStdHandle - _GetStdHandle.argtypes = [ - wintypes.DWORD, - ] - _GetStdHandle.restype = wintypes.HANDLE - def get_windows_console_features() -> WindowsConsoleFeatures: """Get windows console features. Returns: WindowsConsoleFeatures: An instance of WindowsConsoleFeatures. """ - handle = _GetStdHandle(STDOUT) - console_mode = wintypes.DWORD() - result = _GetConsoleMode(handle, console_mode) - vt = bool(result and console_mode.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING) + handle = GetStdHandle() + try: + console_mode = GetConsoleMode(handle) + success = True + except LegacyWindowsError: + console_mode = 0 + success = False + vt = bool(success and console_mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING) truecolor = False if vt: win_version = sys.getwindowsversion() diff --git a/pipenv/patched/notpip/_vendor/rich/_windows_renderer.py b/pipenv/patched/notpip/_vendor/rich/_windows_renderer.py new file mode 100644 index 0000000000..a8c42a1e8f --- /dev/null +++ b/pipenv/patched/notpip/_vendor/rich/_windows_renderer.py @@ -0,0 +1,53 @@ +from typing import Iterable, Sequence, Tuple, cast + +from pipenv.patched.notpip._vendor.rich._win32_console import LegacyWindowsTerm, WindowsCoordinates +from pipenv.patched.notpip._vendor.rich.segment import ControlCode, ControlType, Segment + + +def legacy_windows_render(buffer: Iterable[Segment], term: LegacyWindowsTerm) -> None: + """Makes appropriate Windows Console API calls based on the segments in the buffer. + + Args: + buffer (Iterable[Segment]): Iterable of Segments to convert to Win32 API calls. + term (LegacyWindowsTerm): Used to call the Windows Console API. 
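+
+    Example (illustrative; assumes `import sys` and a legacy Windows stdout):
+
+        term = LegacyWindowsTerm(sys.stdout)
+        buffer = [Segment("Hello, world!")]  # plain text: no style, no control codes
+        legacy_windows_render(buffer, term)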
+ """ + for text, style, control in buffer: + if not control: + if style: + term.write_styled(text, style) + else: + term.write_text(text) + else: + control_codes: Sequence[ControlCode] = control + for control_code in control_codes: + control_type = control_code[0] + if control_type == ControlType.CURSOR_MOVE_TO: + _, x, y = cast(Tuple[ControlType, int, int], control_code) + term.move_cursor_to(WindowsCoordinates(row=y - 1, col=x - 1)) + elif control_type == ControlType.CARRIAGE_RETURN: + term.write_text("\r") + elif control_type == ControlType.HOME: + term.move_cursor_to(WindowsCoordinates(0, 0)) + elif control_type == ControlType.CURSOR_UP: + term.move_cursor_up() + elif control_type == ControlType.CURSOR_DOWN: + term.move_cursor_down() + elif control_type == ControlType.CURSOR_FORWARD: + term.move_cursor_forward() + elif control_type == ControlType.CURSOR_BACKWARD: + term.move_cursor_backward() + elif control_type == ControlType.CURSOR_MOVE_TO_COLUMN: + _, column = cast(Tuple[ControlType, int], control_code) + term.move_cursor_to_column(column - 1) + elif control_type == ControlType.HIDE_CURSOR: + term.hide_cursor() + elif control_type == ControlType.SHOW_CURSOR: + term.show_cursor() + elif control_type == ControlType.ERASE_IN_LINE: + _, mode = cast(Tuple[ControlType, int], control_code) + if mode == 0: + term.erase_end_of_line() + elif mode == 1: + term.erase_start_of_line() + elif mode == 2: + term.erase_line() diff --git a/pipenv/patched/notpip/_vendor/rich/align.py b/pipenv/patched/notpip/_vendor/rich/align.py index ee235a92f6..5abe426345 100644 --- a/pipenv/patched/notpip/_vendor/rich/align.py +++ b/pipenv/patched/notpip/_vendor/rich/align.py @@ -18,7 +18,6 @@ AlignMethod = Literal["left", "center", "right"] VerticalAlignMethod = Literal["top", "middle", "bottom"] -AlignValues = AlignMethod # TODO: deprecate AlignValues class Align(JupyterMixin): diff --git a/pipenv/patched/notpip/_vendor/rich/ansi.py b/pipenv/patched/notpip/_vendor/rich/ansi.py index 92e4772edd..d4c32cef1e 100644 --- a/pipenv/patched/notpip/_vendor/rich/ansi.py +++ b/pipenv/patched/notpip/_vendor/rich/ansi.py @@ -1,21 +1,27 @@ -from contextlib import suppress import re -from typing import Iterable, NamedTuple +import sys +from contextlib import suppress +from typing import Iterable, NamedTuple, Optional from .color import Color from .style import Style from .text import Text -re_ansi = re.compile(r"(?:\x1b\[(.*?)m)|(?:\x1b\](.*?)\x1b\\)") -re_csi = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") +re_ansi = re.compile( + r""" +(?:\x1b\](.*?)\x1b\\)| +(?:\x1b([(@-Z\\-_]|\[[0-?]*[ -/]*[@-~])) +""", + re.VERBOSE, +) class _AnsiToken(NamedTuple): """Result of ansi tokenized string.""" plain: str = "" - sgr: str = "" - osc: str = "" + sgr: Optional[str] = "" + osc: Optional[str] = "" def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]: @@ -28,20 +34,22 @@ def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]: AnsiToken: A named tuple of (plain, sgr, osc) """ - def remove_csi(ansi_text: str) -> str: - """Remove unknown CSI sequences.""" - return re_csi.sub("", ansi_text) - position = 0 + sgr: Optional[str] + osc: Optional[str] for match in re_ansi.finditer(ansi_text): start, end = match.span(0) - sgr, osc = match.groups() + osc, sgr = match.groups() if start > position: - yield _AnsiToken(remove_csi(ansi_text[position:start])) - yield _AnsiToken("", sgr, osc) + yield _AnsiToken(ansi_text[position:start]) + if sgr: + if sgr.endswith("m"): + yield _AnsiToken("", sgr[1:-1], osc) + else: + yield 
_AnsiToken("", sgr, osc) position = end if position < len(ansi_text): - yield _AnsiToken(remove_csi(ansi_text[position:])) + yield _AnsiToken(ansi_text[position:]) SGR_STYLE_MAP = { @@ -138,20 +146,21 @@ def decode_line(self, line: str) -> Text: text = Text() append = text.append line = line.rsplit("\r", 1)[-1] - for token in _ansi_tokenize(line): - plain_text, sgr, osc = token + for plain_text, sgr, osc in _ansi_tokenize(line): if plain_text: append(plain_text, self.style or None) - elif osc: + elif osc is not None: if osc.startswith("8;"): _params, semicolon, link = osc[2:].partition(";") if semicolon: self.style = self.style.update_link(link or None) - elif sgr: + elif sgr is not None: # Translate in to semi-colon separated codes # Ignore invalid codes, because we want to be lenient codes = [ - min(255, int(_code)) for _code in sgr.split(";") if _code.isdigit() + min(255, int(_code) if _code else 0) + for _code in sgr.split(";") + if _code.isdigit() or _code == "" ] iter_codes = iter(codes) for code in iter_codes: @@ -198,10 +207,10 @@ def decode_line(self, line: str) -> Text: return text -if __name__ == "__main__": # pragma: no cover - import pty +if sys.platform != "win32" and __name__ == "__main__": # pragma: no cover import io import os + import pty import sys decoder = AnsiDecoder() diff --git a/pipenv/patched/notpip/_vendor/rich/cells.py b/pipenv/patched/notpip/_vendor/rich/cells.py index e824ea2a6d..d7adf5a046 100644 --- a/pipenv/patched/notpip/_vendor/rich/cells.py +++ b/pipenv/patched/notpip/_vendor/rich/cells.py @@ -1,5 +1,5 @@ -from functools import lru_cache import re +from functools import lru_cache from typing import Dict, List from ._cell_widths import CELL_WIDTHS @@ -18,17 +18,14 @@ def cell_len(text: str, _cache: Dict[str, int] = LRUCache(1024 * 4)) -> int: Returns: int: Get the number of cells required to display text. """ - - if _is_single_cell_widths(text): - return len(text) - else: - cached_result = _cache.get(text, None) - if cached_result is not None: - return cached_result - _get_size = get_character_cell_size - total_size = sum(_get_size(character) for character in text) - if len(text) <= 64: - _cache[text] = total_size + cached_result = _cache.get(text, None) + if cached_result is not None: + return cached_result + + _get_size = get_character_cell_size + total_size = sum(_get_size(character) for character in text) + if len(text) <= 512: + _cache[text] = total_size return total_size @@ -42,9 +39,6 @@ def get_character_cell_size(character: str) -> int: Returns: int: Number of cells (0, 1 or 2) occupied by that character. 
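
    Example (the widths come from the East Asian Width data in ._cell_widths):

        assert get_character_cell_size("a") == 1  # narrow ASCII letter
        assert get_character_cell_size("猫") == 2  # wide CJK ideograph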
""" - if _is_single_cell_widths(character): - return 1 - return _get_codepoint_cell_size(ord(character)) @@ -119,14 +113,12 @@ def chop_cells(text: str, max_size: int, position: int = 0) -> List[str]: _get_character_cell_size = get_character_cell_size characters = [ (character, _get_character_cell_size(character)) for character in text - ][::-1] + ] total_size = position lines: List[List[str]] = [[]] append = lines[-1].append - pop = characters.pop - while characters: - character, size = pop() + for character, size in reversed(characters): if total_size + size > max_size: lines.append([character]) append = lines[-1].append @@ -134,6 +126,7 @@ def chop_cells(text: str, max_size: int, position: int = 0) -> List[str]: else: total_size += size append(character) + return ["".join(line) for line in lines] diff --git a/pipenv/patched/notpip/_vendor/rich/color.py b/pipenv/patched/notpip/_vendor/rich/color.py index f0fa026d64..6bca2da922 100644 --- a/pipenv/patched/notpip/_vendor/rich/color.py +++ b/pipenv/patched/notpip/_vendor/rich/color.py @@ -7,7 +7,7 @@ from ._palettes import EIGHT_BIT_PALETTE, STANDARD_PALETTE, WINDOWS_PALETTE from .color_triplet import ColorTriplet -from .repr import rich_repr, Result +from .repr import Result, rich_repr from .terminal_theme import DEFAULT_TERMINAL_THEME if TYPE_CHECKING: # pragma: no cover @@ -61,6 +61,7 @@ def __repr__(self) -> str: "bright_cyan": 14, "bright_white": 15, "grey0": 16, + "gray0": 16, "navy_blue": 17, "dark_blue": 18, "blue3": 20, @@ -96,6 +97,7 @@ def __repr__(self) -> str: "blue_violet": 57, "orange4": 94, "grey37": 59, + "gray37": 59, "medium_purple4": 60, "slate_blue3": 62, "royal_blue1": 63, @@ -128,7 +130,9 @@ def __repr__(self) -> str: "yellow4": 106, "wheat4": 101, "grey53": 102, + "gray53": 102, "light_slate_grey": 103, + "light_slate_gray": 103, "medium_purple": 104, "light_slate_blue": 105, "dark_olive_green3": 149, @@ -155,11 +159,13 @@ def __repr__(self) -> str: "light_salmon3": 173, "rosy_brown": 138, "grey63": 139, + "gray63": 139, "medium_purple1": 141, "gold3": 178, "dark_khaki": 143, "navajo_white3": 144, "grey69": 145, + "gray69": 145, "light_steel_blue3": 146, "light_steel_blue": 147, "yellow3": 184, @@ -189,6 +195,7 @@ def __repr__(self) -> str: "light_goldenrod2": 222, "light_yellow3": 187, "grey84": 188, + "gray84": 188, "light_steel_blue1": 189, "yellow2": 190, "dark_olive_green1": 192, @@ -223,30 +230,55 @@ def __repr__(self) -> str: "wheat1": 229, "cornsilk1": 230, "grey100": 231, + "gray100": 231, "grey3": 232, + "gray3": 232, "grey7": 233, + "gray7": 233, "grey11": 234, + "gray11": 234, "grey15": 235, + "gray15": 235, "grey19": 236, + "gray19": 236, "grey23": 237, + "gray23": 237, "grey27": 238, + "gray27": 238, "grey30": 239, + "gray30": 239, "grey35": 240, + "gray35": 240, "grey39": 241, + "gray39": 241, "grey42": 242, + "gray42": 242, "grey46": 243, + "gray46": 243, "grey50": 244, + "gray50": 244, "grey54": 245, + "gray54": 245, "grey58": 246, + "gray58": 246, "grey62": 247, + "gray62": 247, "grey66": 248, + "gray66": 248, "grey70": 249, + "gray70": 249, "grey74": 250, + "gray74": 250, "grey78": 251, + "gray78": 251, "grey82": 252, + "gray82": 252, "grey85": 253, + "gray85": 253, "grey89": 254, + "gray89": 254, "grey93": 255, + "gray93": 255, } @@ -279,8 +311,8 @@ class Color(NamedTuple): def __rich__(self) -> "Text": """Dispays the actual color if Rich printed.""" - from .text import Text from .style import Style + from .text import Text return Text.assemble( f" @@ -104,6 +115,127 @@ class NoChange: """ 
+CONSOLE_SVG_FORMAT = """\
+<!-- The SVG markup of this template did not survive extraction. The original
+     template defines a complete SVG document: a terminal-window frame with a
+     {title} caption, an embedded {stylesheet} block, and the rendered {code}
+     lines, laid out using the {total_width}, {total_height}, {margin},
+     {font_size}, and {line_height} values injected by Console.export_svg()
+     below. -->
+""" + _TERM_COLORS = {"256color": ColorSystem.EIGHT_BIT, "16color": ColorSystem.STANDARD} @@ -224,6 +356,16 @@ def update_height(self, height: int) -> "ConsoleOptions": options.max_height = options.height = height return options + def reset_height(self) -> "ConsoleOptions": + """Return a copy of the options with height set to ``None``. + + Returns: + ~ConsoleOptions: New console options instance. + """ + options = self.copy() + options.height = None + return options + def update_dimensions(self, width: int, height: int) -> "ConsoleOptions": """Update the width and height, and return a copy. @@ -244,7 +386,9 @@ def update_dimensions(self, width: int, height: int) -> "ConsoleOptions": class RichCast(Protocol): """An object that may be 'cast' to a console renderable.""" - def __rich__(self) -> Union["ConsoleRenderable", str]: # pragma: no cover + def __rich__( + self, + ) -> Union["ConsoleRenderable", "RichCast", str]: # pragma: no cover ... @@ -261,11 +405,9 @@ def __rich_console__( # A type that may be rendered by Console. RenderableType = Union[ConsoleRenderable, RichCast, str] - # The result of calling a __rich_console__ method. RenderResult = Iterable[Union[RenderableType, Segment]] - _null_highlighter = NullHighlighter() @@ -501,10 +643,10 @@ def _replace(*args: Any, **kwargs: Any) -> Group: def _is_jupyter() -> bool: # pragma: no cover """Check if we're running in a Jupyter notebook.""" try: - get_ipython # type: ignore + get_ipython # type: ignore[name-defined] except NameError: return False - ipython = get_ipython() # type: ignore + ipython = get_ipython() # type: ignore[name-defined] shell = ipython.__class__.__name__ if "google.colab" in str(ipython.__class__) or shell == "ZMQInteractiveShell": return True # Jupyter notebook or qtconsole @@ -521,7 +663,6 @@ def _is_jupyter() -> bool: # pragma: no cover "windows": ColorSystem.WINDOWS, } - _COLOR_SYSTEMS_NAMES = {system: name for name, system in COLOR_SYSTEMS.items()} @@ -571,12 +712,6 @@ def detect_legacy_windows() -> bool: return WINDOWS and not get_windows_console_features().vt -if detect_legacy_windows(): # pragma: no cover - from pipenv.patched.notpip._vendor.colorama import init - - init(strip=False) - - class Console: """A high level console interface. @@ -597,7 +732,7 @@ class Console: no_color (Optional[bool], optional): Enabled no color mode, or None to auto detect. Defaults to None. tab_size (int, optional): Number of spaces used to replace a tab character. Defaults to 8. record (bool, optional): Boolean to enable recording of terminal output, - required to call :meth:`export_html` and :meth:`export_text`. Defaults to False. + required to call :meth:`export_html`, :meth:`export_svg`, and :meth:`export_text`. Defaults to False. markup (bool, optional): Boolean to enable :ref:`console_markup`. Defaults to True. emoji (bool, optional): Enable emoji code. Defaults to True. emoji_variant (str, optional): Optional emoji variant, either "text" or "emoji". Defaults to None. @@ -1141,7 +1276,7 @@ def show_cursor(self, show: bool = True) -> bool: Args: show (bool, optional): Set visibility of the cursor. 
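
        Example (sketch; only has an effect on an interactive terminal):

            console = Console()
            console.show_cursor(False)  # hide while redrawing
            try:
                ...  # render frames here
            finally:
                console.show_cursor(True)  # always restore the cursor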
""" - if self.is_terminal and not self.legacy_windows: + if self.is_terminal: self.control(Control.show_cursor(show)) return True return False @@ -1232,7 +1367,7 @@ def render( renderable = rich_cast(renderable) if hasattr(renderable, "__rich_console__") and not isclass(renderable): - render_iterable = renderable.__rich_console__(self, _options) # type: ignore + render_iterable = renderable.__rich_console__(self, _options) # type: ignore[union-attr] elif isinstance(renderable, str): text_renderable = self.render_str( renderable, highlight=_options.highlight, markup=_options.markup @@ -1251,6 +1386,7 @@ def render( f"object {render_iterable!r} is not renderable" ) _Segment = Segment + _options = _options.reset_height() for render_output in iter_render: if isinstance(render_output, _Segment): yield render_output @@ -1322,7 +1458,7 @@ def render_str( highlight: Optional[bool] = None, highlighter: Optional[HighlighterType] = None, ) -> "Text": - """Convert a string to a Text instance. This is is called automatically if + """Convert a string to a Text instance. This is called automatically if you print or log a string. Args: @@ -1372,7 +1508,7 @@ def render_str( def get_style( self, name: Union[str, Style], *, default: Optional[Union[Style, str]] = None ) -> Style: - """Get a Style instance by it's theme name or parse a definition. + """Get a Style instance by its theme name or parse a definition. Args: name (str): The name of a style or a style definition. @@ -1904,43 +2040,72 @@ def log( buffer_extend(line) def _check_buffer(self) -> None: - """Check if the buffer may be rendered.""" + """Check if the buffer may be rendered. Render it if it can (e.g. Console.quiet is False) + Rendering is supported on Windows, Unix and Jupyter environments. For + legacy Windows consoles, the win32 API is called directly. + This method will also record what it renders if recording is enabled via Console.record. + """ if self.quiet: del self._buffer[:] return with self._lock: if self._buffer_index == 0: + + if self.record: + with self._record_buffer_lock: + self._record_buffer.extend(self._buffer[:]) + if self.is_jupyter: # pragma: no cover from .jupyter import display display(self._buffer, self._render_buffer(self._buffer[:])) del self._buffer[:] else: - text = self._render_buffer(self._buffer[:]) - del self._buffer[:] - if text: - try: - if WINDOWS: # pragma: no cover - # https://bugs.python.org/issue37871 - write = self.file.write - for line in text.splitlines(True): + if WINDOWS: + use_legacy_windows_render = False + if self.legacy_windows: + try: + use_legacy_windows_render = ( + self.file.fileno() in _STD_STREAMS + ) + except (ValueError, io.UnsupportedOperation): + pass + + if use_legacy_windows_render: + from pipenv.patched.notpip._vendor.rich._win32_console import LegacyWindowsTerm + from pipenv.patched.notpip._vendor.rich._windows_renderer import legacy_windows_render + + legacy_windows_render( + self._buffer[:], LegacyWindowsTerm(self.file) + ) + else: + # Either a non-std stream on legacy Windows, or modern Windows. 
+ text = self._render_buffer(self._buffer[:]) + # https://bugs.python.org/issue37871 + write = self.file.write + for line in text.splitlines(True): + try: write(line) - else: - self.file.write(text) - self.file.flush() + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise + else: + text = self._render_buffer(self._buffer[:]) + try: + self.file.write(text) except UnicodeEncodeError as error: error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" raise + self.file.flush() + del self._buffer[:] + def _render_buffer(self, buffer: Iterable[Segment]) -> str: """Render buffered output, and clear buffer.""" output: List[str] = [] append = output.append color_system = self._color_system legacy_windows = self.legacy_windows - if self.record: - with self._record_buffer_lock: - self._record_buffer.extend(buffer) not_terminal = not self.is_terminal if self.no_color and color_system: buffer = Segment.remove_color(buffer) @@ -1982,23 +2147,15 @@ def input( Returns: str: Text read from stdin. """ - prompt_str = "" if prompt: - with self.capture() as capture: - self.print(prompt, markup=markup, emoji=emoji, end="") - prompt_str = capture.get() - if self.legacy_windows: - # Legacy windows doesn't like ANSI codes in getpass or input (colorama bug)? - self.file.write(prompt_str) - prompt_str = "" + self.print(prompt, markup=markup, emoji=emoji, end="") if password: - result = getpass(prompt_str, stream=stream) + result = getpass("", stream=stream) else: if stream: - self.file.write(prompt_str) result = stream.readline() else: - result = input(prompt_str) + result = input() return result def export_text(self, *, clear: bool = True, styles: bool = False) -> str: @@ -2060,8 +2217,8 @@ def export_html( Args: theme (TerminalTheme, optional): TerminalTheme object containing console colors. clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. - code_format (str, optional): Format string to render HTML, should contain {foreground} - {background} and {code}. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. Defaults to False. @@ -2137,8 +2294,8 @@ def save_html( path (str): Path to write html file. theme (TerminalTheme, optional): TerminalTheme object containing console colors. clear (bool, optional): Clear record buffer after exporting. Defaults to ``True``. - code_format (str, optional): Format string to render HTML, should contain {foreground} - {background} and {code}. + code_format (str, optional): Format string to render HTML. In addition to '{foreground}', + '{background}', and '{code}', should contain '{stylesheet}' if inline_styles is ``False``. inline_styles (bool, optional): If ``True`` styles will be inlined in to spans, which makes files larger but easier to cut and paste markup. If ``False``, styles will be embedded in a style tag. Defaults to False. 
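
            Example (a sketch; both recording exports require a console
            constructed with record=True, and the file names are arbitrary):

                console = Console(record=True)
                console.print("[bold magenta]Hello[/] World")
                console.save_html("report.html", inline_styles=True)
                console.save_svg("report.svg", title="Demo")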
@@ -2153,9 +2310,173 @@ def save_html( with open(path, "wt", encoding="utf-8") as write_file: write_file.write(html) + def export_svg( + self, + *, + title: str = "Rich", + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_SVG_FORMAT, + ) -> str: + """Generate an SVG string from the console contents (requires record=True in Console constructor) + + Args: + title (str): The title of the tab in the output image + theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` + code_format (str): Format string used to generate the SVG. Rich will inject a number of variables + into the string in order to form the final SVG output. The default template used and the variables + injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. + + Returns: + str: The string representation of the SVG. That is, the ``code_format`` template with content injected. + """ + assert ( + self.record + ), "To export console contents set record=True in the constructor or instance" + + _theme = theme or SVG_EXPORT_THEME + + with self._record_buffer_lock: + segments = Segment.simplify(self._record_buffer) + segments = Segment.filter_control(segments) + parts = [(text, style or Style.null()) for text, style, _ in segments] + terminal_text = Text.assemble(*parts) + lines = terminal_text.wrap(self, width=self.width, overflow="fold") + segments = self.render(lines, options=self.options) + segment_lines = list( + Segment.split_and_crop_lines( + segments, length=self.width, include_new_lines=False + ) + ) + + fragments: List[str] = [] + theme_foreground_color = _theme.foreground_color.hex + theme_background_color = _theme.background_color.hex + + theme_foreground_css = f"color: {theme_foreground_color}; text-decoration-color: {theme_foreground_color};" + theme_background_css = f"background-color: {theme_background_color};" + + theme_css = theme_foreground_css + theme_background_css + + styles: Dict[str, int] = {} + styles[theme_css] = 1 + + for line in segment_lines: + line_spans = [] + for segment in line: + text, style, _ = segment + text = escape(text) + if style: + rules = style.get_html_style(_theme) + if style.link: + text = f'{text}' + + if style.blink or style.blink2: + text = f'{text}' + + # If the style doesn't contain a color, we still + # need to make sure we output the default foreground color + # from the TerminalTheme. + if not style.reverse: + foreground_css = theme_foreground_css + background_css = theme_background_css + else: + foreground_css = f"color: {theme_background_color}; text-decoration-color: {theme_background_color};" + background_css = ( + f"background-color: {theme_foreground_color};" + ) + + if style.color is None: + rules += f";{foreground_css}" + if style.bgcolor is None: + rules += f";{background_css}" + + style_number = styles.setdefault(rules, len(styles) + 1) + text = f'{text}' + else: + text = f'{text}' + line_spans.append(text) + + fragments.append(f"
{''.join(line_spans)}
") + + stylesheet_rules = [] + for style_rule, style_number in styles.items(): + if style_rule: + stylesheet_rules.append(f".r{style_number} {{{ style_rule }}}") + stylesheet = "\n".join(stylesheet_rules) + + if clear: + self._record_buffer.clear() + + # These values are the ones that I found to work well after experimentation. + # Many of them can be tweaked, but too much variation from these values could + # result in visually broken output/clipping issues. + terminal_padding = 12 + font_size = 18 + line_height = font_size + 4 + code_start_y = 60 + required_code_height = line_height * len(lines) + margin = 140 + + # Monospace fonts are generally around 0.5-0.55 width/height ratio, but I've + # added extra width to ensure that the output SVG is big enough. + monospace_font_width_scale = 0.60 + + # This works out as a good heuristic for the final size of the drawn terminal. + terminal_height = required_code_height + code_start_y + terminal_width = ( + self.width * monospace_font_width_scale * font_size + + 2 * terminal_padding + + self.width + ) + total_height = terminal_height + 2 * margin + total_width = terminal_width + 2 * margin + + rendered_code = code_format.format( + code="\n".join(fragments), + total_height=total_height, + total_width=total_width, + theme_foreground_color=theme_foreground_color, + theme_background_color=theme_background_color, + margin=margin, + font_size=font_size, + line_height=line_height, + title=title, + stylesheet=stylesheet, + ) + + return rendered_code + + def save_svg( + self, + path: str, + *, + title: str = "Rich", + theme: Optional[TerminalTheme] = None, + clear: bool = True, + code_format: str = CONSOLE_SVG_FORMAT, + ) -> None: + """Generate an SVG file from the console contents (requires record=True in Console constructor). + + Args: + path (str): The path to write the SVG to. + title (str): The title of the tab in the output image + theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal + clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` + code_format (str): Format string used to generate the SVG. Rich will inject a number of variables + into the string in order to form the final SVG output. The default template used and the variables + injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. 
+ """ + svg = self.export_svg( + title=title, theme=theme, clear=clear, code_format=code_format + ) + with open(path, "wt", encoding="utf-8") as write_file: + write_file.write(svg) + if __name__ == "__main__": # pragma: no cover - console = Console() + console = Console(record=True) console.log( "JSONRPC [i]request[/i]", @@ -2208,4 +2529,3 @@ def save_html( }, } ) - console.log("foo") diff --git a/pipenv/patched/notpip/_vendor/rich/control.py b/pipenv/patched/notpip/_vendor/rich/control.py index c98d0d7d98..e17b2c6349 100644 --- a/pipenv/patched/notpip/_vendor/rich/control.py +++ b/pipenv/patched/notpip/_vendor/rich/control.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Dict, Iterable, List, TYPE_CHECKING, Union +from typing import Callable, Dict, Iterable, List, TYPE_CHECKING, Union from .segment import ControlCode, ControlType, Segment diff --git a/pipenv/patched/notpip/_vendor/rich/default_styles.py b/pipenv/patched/notpip/_vendor/rich/default_styles.py index 1f930bba01..b987f63bdb 100644 --- a/pipenv/patched/notpip/_vendor/rich/default_styles.py +++ b/pipenv/patched/notpip/_vendor/rich/default_styles.py @@ -2,7 +2,6 @@ from .style import Style - DEFAULT_STYLES: Dict[str, Style] = { "none": Style.null(), "reset": Style( @@ -41,6 +40,7 @@ "inspect.attr.dunder": Style(color="yellow", italic=True, dim=True), "inspect.callable": Style(bold=True, color="red"), "inspect.def": Style(italic=True, color="bright_cyan"), + "inspect.class": Style(italic=True, color="bright_cyan"), "inspect.error": Style(bold=True, color="red"), "inspect.equals": Style(), "inspect.help": Style(color="cyan"), diff --git a/pipenv/patched/notpip/_vendor/rich/diagnose.py b/pipenv/patched/notpip/_vendor/rich/diagnose.py index 7175a7d0a4..39caf4b1f3 100644 --- a/pipenv/patched/notpip/_vendor/rich/diagnose.py +++ b/pipenv/patched/notpip/_vendor/rich/diagnose.py @@ -1,6 +1,35 @@ -if __name__ == "__main__": # pragma: no cover - from pipenv.patched.notpip._vendor.rich.console import Console - from pipenv.patched.notpip._vendor.rich import inspect +import os +import platform + +from pipenv.patched.notpip._vendor.rich import inspect +from pipenv.patched.notpip._vendor.rich.console import Console, get_windows_console_features +from pipenv.patched.notpip._vendor.rich.panel import Panel +from pipenv.patched.notpip._vendor.rich.pretty import Pretty + +def report() -> None: # pragma: no cover + """Print a report to the terminal with debugging information""" console = Console() inspect(console) + features = get_windows_console_features() + inspect(features) + + env_names = ( + "TERM", + "COLORTERM", + "CLICOLOR", + "NO_COLOR", + "TERM_PROGRAM", + "COLUMNS", + "LINES", + "JPY_PARENT_PID", + "VSCODE_VERBOSE_LOGGING", + ) + env = {name: os.getenv(name) for name in env_names} + console.print(Panel.fit((Pretty(env)), title="[b]Environment Variables")) + + console.print(f'platform="{platform.system()}"') + + +if __name__ == "__main__": # pragma: no cover + report() diff --git a/pipenv/patched/notpip/_vendor/rich/filesize.py b/pipenv/patched/notpip/_vendor/rich/filesize.py index b3a0996b05..61be47510f 100644 --- a/pipenv/patched/notpip/_vendor/rich/filesize.py +++ b/pipenv/patched/notpip/_vendor/rich/filesize.py @@ -13,7 +13,7 @@ __all__ = ["decimal"] -from typing import Iterable, List, Tuple, Optional +from typing import Iterable, List, Optional, Tuple def _to_str( @@ -30,7 +30,7 @@ def _to_str( return "{:,} bytes".format(size) for i, suffix in enumerate(suffixes, 2): # noqa: B007 - unit = base ** i + unit = base**i if size < unit: 
break return "{:,.{precision}f}{separator}{}".format( @@ -44,7 +44,7 @@ def _to_str( def pick_unit_and_suffix(size: int, suffixes: List[str], base: int) -> Tuple[int, str]: """Pick a suffix and base for the given size.""" for i, suffix in enumerate(suffixes): - unit = base ** i + unit = base**i if size < unit * base: break return unit, suffix diff --git a/pipenv/patched/notpip/_vendor/rich/highlighter.py b/pipenv/patched/notpip/_vendor/rich/highlighter.py index 8afdd017b6..7bee4167e4 100644 --- a/pipenv/patched/notpip/_vendor/rich/highlighter.py +++ b/pipenv/patched/notpip/_vendor/rich/highlighter.py @@ -1,7 +1,8 @@ +import re from abc import ABC, abstractmethod from typing import List, Union -from .text import Text +from .text import Span, Text def _combine_regex(*regexes: str) -> str: @@ -81,22 +82,22 @@ class ReprHighlighter(RegexHighlighter): base_style = "repr." highlights = [ - r"(?P\<)(?P[\w\-\.\:]*)(?P[\w\W]*?)(?P\>)", - r"(?P[\w_]{1,50})=(?P\"?[\w_]+\"?)?", - r"(?P[\{\[\(\)\]\}])", + r"(?P<)(?P[-\w.:|]*)(?P[\w\W]*?)(?P>)", + r'(?P[\w_]{1,50})=(?P"?[\w_]+"?)?', + r"(?P[][{}()])", _combine_regex( r"(?P[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})", r"(?P([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4})", r"(?P(?:[0-9A-Fa-f]{1,2}-){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){3}[0-9A-Fa-f]{4})", r"(?P(?:[0-9A-Fa-f]{1,2}-){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){2}[0-9A-Fa-f]{4})", - r"(?P[\w\.]*?)\(", + r"(?P[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})", + r"(?P[\w.]*?)\(", r"\b(?PTrue)\b|\b(?PFalse)\b|\b(?PNone)\b", r"(?P\.\.\.)", - r"(?P(?\B(\/[\w\.\-\_\+]+)*\/)(?P[\w\.\-\_\+]*)?", - r"(?b?\'\'\'.*?(?[a-fA-F0-9]{8}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{4}\-[a-fA-F0-9]{12})", - r"(?P(file|https|http|ws|wss):\/\/[0-9a-zA-Z\$\-\_\+\!`\(\)\,\.\?\/\;\:\&\=\%\#]*)", + r"(?P(?\B(/[-\w._+]+)*\/)(?P[-\w._+]*)?", + r"(?b?'''.*?(?(file|https|http|ws|wss)://[-0-9a-zA-Z$_+!`(),.?/;:&=%#]*)", ), ] @@ -104,17 +105,39 @@ class ReprHighlighter(RegexHighlighter): class JSONHighlighter(RegexHighlighter): """Highlights JSON""" + # Captures the start and end of JSON strings, handling escaped quotes + JSON_STR = r"(?b?\".*?(?[\{\[\(\)\]\}])", r"\b(?Ptrue)\b|\b(?Pfalse)\b|\b(?Pnull)\b", r"(?P(?b?\".*?(?b?\".*?(? None: + super().highlight(text) + + # Additional work to handle highlighting JSON keys + plain = text.plain + append = text.spans.append + whitespace = self.JSON_WHITESPACE + for match in re.finditer(self.JSON_STR, plain): + start, end = match.span() + cursor = end + while cursor < len(plain): + char = plain[cursor] + cursor += 1 + if char == ":": + append(Span(start, end, "json.key")) + elif char in whitespace: + continue + break + if __name__ == "__main__": # pragma: no cover from .console import Console @@ -145,3 +168,6 @@ class JSONHighlighter(RegexHighlighter): console.print( "127.0.1.1 bar 192.168.1.4 2001:0db8:85a3:0000:0000:8a2e:0370:7334 foo" ) + import json + + console.print_json(json.dumps(obj={"name": "apple", "count": 1}), indent=None) diff --git a/pipenv/patched/notpip/_vendor/rich/jupyter.py b/pipenv/patched/notpip/_vendor/rich/jupyter.py index bedf5cb19a..f2e3d577f7 100644 --- a/pipenv/patched/notpip/_vendor/rich/jupyter.py +++ b/pipenv/patched/notpip/_vendor/rich/jupyter.py @@ -1,9 +1,12 @@ -from typing import Any, Dict, Iterable, List +from typing import TYPE_CHECKING, Any, Dict, Iterable, List from . 
import get_console from .segment import Segment from .terminal_theme import DEFAULT_TERMINAL_THEME +if TYPE_CHECKING: + from pipenv.patched.notpip._vendor.rich.console import ConsoleRenderable + JUPYTER_HTML_FORMAT = """\
<!-- wrapper markup lost in extraction; the original template wraps {code} in a styled <pre> element -->
""" @@ -33,10 +36,13 @@ class JupyterMixin: __slots__ = () def _repr_mimebundle_( - self, include: Iterable[str], exclude: Iterable[str], **kwargs: Any + self: "ConsoleRenderable", + include: Iterable[str], + exclude: Iterable[str], + **kwargs: Any, ) -> Dict[str, str]: console = get_console() - segments = list(console.render(self, console.options)) # type: ignore + segments = list(console.render(self, console.options)) html = _render_segments(segments) text = console._render_buffer(segments) data = {"text/plain": text, "text/html": html} @@ -63,7 +69,7 @@ def escape(text: str) -> str: rule = style.get_html_style(theme) text = f'{text}' if rule else text if style.link: - text = f'{text}' + text = f'{text}' append_fragment(text) code = "".join(fragments) diff --git a/pipenv/patched/notpip/_vendor/rich/layout.py b/pipenv/patched/notpip/_vendor/rich/layout.py index 9e75f5ac39..24e5343a80 100644 --- a/pipenv/patched/notpip/_vendor/rich/layout.py +++ b/pipenv/patched/notpip/_vendor/rich/layout.py @@ -73,6 +73,7 @@ def __rich_console__( style=self.style, title=self.highlighter(title), border_style="blue", + height=height, ) @@ -299,7 +300,7 @@ def add_split(self, *layouts: Union["Layout", RenderableType]) -> None: self._children.extend(_layouts) def split_row(self, *layouts: Union["Layout", RenderableType]) -> None: - """Split the layout in tow a row (Layouts side by side). + """Split the layout in to a row (layouts side by side). Args: *layouts (Layout): Positional arguments should be (sub) Layout instances. diff --git a/pipenv/patched/notpip/_vendor/rich/logging.py b/pipenv/patched/notpip/_vendor/rich/logging.py index 002f1f7bf1..58188fd8a8 100644 --- a/pipenv/patched/notpip/_vendor/rich/logging.py +++ b/pipenv/patched/notpip/_vendor/rich/logging.py @@ -2,7 +2,8 @@ from datetime import datetime from logging import Handler, LogRecord from pathlib import Path -from typing import ClassVar, List, Optional, Type, Union +from types import ModuleType +from typing import ClassVar, List, Optional, Iterable, Type, Union from . import get_console from ._log_render import LogRender, FormatTimeCallable @@ -37,10 +38,12 @@ class RichHandler(Handler): tracebacks_theme (str, optional): Override pygments theme used in traceback. tracebacks_word_wrap (bool, optional): Enable word wrapping of long tracebacks lines. Defaults to True. tracebacks_show_locals (bool, optional): Enable display of locals in tracebacks. Defaults to False. + tracebacks_suppress (Sequence[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback. locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to 10. locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80. log_time_format (Union[str, TimeFormatterCallable], optional): If ``log_time`` is enabled, either string for strftime or callable that formats the time. Defaults to "[%x %X] ". + keywords (List[str], optional): List of words to highlight instead of ``RichHandler.KEYWORDS``. 
""" KEYWORDS: ClassVar[Optional[List[str]]] = [ @@ -73,9 +76,11 @@ def __init__( tracebacks_theme: Optional[str] = None, tracebacks_word_wrap: bool = True, tracebacks_show_locals: bool = False, + tracebacks_suppress: Iterable[Union[str, ModuleType]] = (), locals_max_length: int = 10, locals_max_string: int = 80, log_time_format: Union[str, FormatTimeCallable] = "[%x %X]", + keywords: Optional[List[str]] = None, ) -> None: super().__init__(level=level) self.console = console or get_console() @@ -96,8 +101,10 @@ def __init__( self.tracebacks_theme = tracebacks_theme self.tracebacks_word_wrap = tracebacks_word_wrap self.tracebacks_show_locals = tracebacks_show_locals + self.tracebacks_suppress = tracebacks_suppress self.locals_max_length = locals_max_length self.locals_max_string = locals_max_string + self.keywords = keywords def get_level_text(self, record: LogRecord) -> Text: """Get the level name from the record. @@ -137,6 +144,7 @@ def emit(self, record: LogRecord) -> None: show_locals=self.tracebacks_show_locals, locals_max_length=self.locals_max_length, locals_max_string=self.locals_max_string, + suppress=self.tracebacks_suppress, ) message = record.getMessage() if self.formatter: @@ -171,8 +179,12 @@ def render_message(self, record: LogRecord, message: str) -> "ConsoleRenderable" if highlighter: message_text = highlighter(message_text) - if self.KEYWORDS: - message_text.highlight_words(self.KEYWORDS, "logging.keyword") + if self.keywords is None: + self.keywords = self.KEYWORDS + + if self.keywords: + message_text.highlight_words(self.keywords, "logging.keyword") + return message_text def render( diff --git a/pipenv/patched/notpip/_vendor/rich/markup.py b/pipenv/patched/notpip/_vendor/rich/markup.py index c95df685ca..bb6405c482 100644 --- a/pipenv/patched/notpip/_vendor/rich/markup.py +++ b/pipenv/patched/notpip/_vendor/rich/markup.py @@ -1,21 +1,20 @@ +import re from ast import literal_eval from operator import attrgetter -import re from typing import Callable, Iterable, List, Match, NamedTuple, Optional, Tuple, Union +from ._emoji_replace import _emoji_replace +from .emoji import EmojiVariant from .errors import MarkupError from .style import Style from .text import Span, Text -from .emoji import EmojiVariant -from ._emoji_replace import _emoji_replace - RE_TAGS = re.compile( - r"""((\\*)\[([a-z#\/@].*?)\])""", + r"""((\\*)\[([a-z#/@][^[]*?)])""", re.VERBOSE, ) -RE_HANDLER = re.compile(r"^([\w\.]*?)(\(.*?\))?$") +RE_HANDLER = re.compile(r"^([\w.]*?)(\(.*?\))?$") class Tag(NamedTuple): @@ -146,6 +145,8 @@ def pop_style(style_name: str) -> Tuple[int, Tag]: for position, plain_text, tag in _parse(markup): if plain_text is not None: + # Handle open brace escapes, where the brace is not part of a tag. 
+ plain_text = plain_text.replace("\\[", "[") append(emoji_replace(plain_text) if emoji else plain_text) elif tag is not None: if tag.name.startswith("/"): # Closing tag @@ -233,8 +234,8 @@ def pop_style(style_name: str) -> Tuple[int, Tag]: ":warning-emoji: [bold red blink] DANGER![/]", ] - from pipenv.patched.notpip._vendor.rich.table import Table from pipenv.patched.notpip._vendor.rich import print + from pipenv.patched.notpip._vendor.rich.table import Table grid = Table("Markup", "Result", padding=(0, 1)) diff --git a/pipenv/patched/notpip/_vendor/rich/measure.py b/pipenv/patched/notpip/_vendor/rich/measure.py index aea238df93..e12787c8be 100644 --- a/pipenv/patched/notpip/_vendor/rich/measure.py +++ b/pipenv/patched/notpip/_vendor/rich/measure.py @@ -1,5 +1,5 @@ from operator import itemgetter -from typing import Callable, Iterable, NamedTuple, Optional, TYPE_CHECKING +from typing import TYPE_CHECKING, Callable, Iterable, NamedTuple, Optional from . import errors from .protocol import is_renderable, rich_cast @@ -96,7 +96,9 @@ def get( if _max_width < 1: return Measurement(0, 0) if isinstance(renderable, str): - renderable = console.render_str(renderable, markup=options.markup) + renderable = console.render_str( + renderable, markup=options.markup, highlight=False + ) renderable = rich_cast(renderable) if is_renderable(renderable): get_console_width: Optional[ diff --git a/pipenv/patched/notpip/_vendor/rich/pager.py b/pipenv/patched/notpip/_vendor/rich/pager.py index dbfb973e36..a3f7aa62af 100644 --- a/pipenv/patched/notpip/_vendor/rich/pager.py +++ b/pipenv/patched/notpip/_vendor/rich/pager.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Any, Callable +from typing import Any class Pager(ABC): diff --git a/pipenv/patched/notpip/_vendor/rich/panel.py b/pipenv/patched/notpip/_vendor/rich/panel.py index 151fe5f017..fc2807c313 100644 --- a/pipenv/patched/notpip/_vendor/rich/panel.py +++ b/pipenv/patched/notpip/_vendor/rich/panel.py @@ -1,14 +1,13 @@ -from typing import Optional, TYPE_CHECKING - -from .box import Box, ROUNDED +from typing import TYPE_CHECKING, Optional from .align import AlignMethod +from .box import ROUNDED, Box from .jupyter import JupyterMixin from .measure import Measurement, measure_renderables from .padding import Padding, PaddingDimensions +from .segment import Segment from .style import StyleType from .text import Text, TextType -from .segment import Segment if TYPE_CHECKING: from .console import Console, ConsoleOptions, RenderableType, RenderResult @@ -183,7 +182,7 @@ def __rich_console__( else: title_text.align(self.title_align, width - 4, character=box.top) yield Segment(box.top_left + box.top, border_style) - yield from console.render(title_text) + yield from console.render(title_text, child_options.update_width(width - 4)) yield Segment(box.top + box.top_right, border_style) yield new_line @@ -202,7 +201,9 @@ def __rich_console__( else: subtitle_text.align(self.subtitle_align, width - 4, character=box.bottom) yield Segment(box.bottom_left + box.bottom, border_style) - yield from console.render(subtitle_text) + yield from console.render( + subtitle_text, child_options.update_width(width - 4) + ) yield Segment(box.bottom + box.bottom_right, border_style) yield new_line @@ -235,8 +236,8 @@ def __rich_measure__( c = Console() + from .box import DOUBLE, ROUNDED from .padding import Padding - from .box import ROUNDED, DOUBLE p = Panel( "Hello, World!", diff --git a/pipenv/patched/notpip/_vendor/rich/pretty.py 
b/pipenv/patched/notpip/_vendor/rich/pretty.py index 4cbaebd252..a97fe83cfc 100644 --- a/pipenv/patched/notpip/_vendor/rich/pretty.py +++ b/pipenv/patched/notpip/_vendor/rich/pretty.py @@ -1,8 +1,8 @@ import builtins +import collections import dataclasses import inspect import os -import re import sys from array import array from collections import Counter, UserDict, UserList, defaultdict, deque @@ -29,8 +29,7 @@ try: import pipenv.vendor.attr as _attr_module except ImportError: # pragma: no cover - _attr_module = None # type: ignore - + _attr_module = None # type: ignore[assignment] from . import get_console from ._loop import loop_last @@ -80,6 +79,29 @@ def _is_dataclass_repr(obj: object) -> bool: return False +_dummy_namedtuple = collections.namedtuple("_dummy_namedtuple", []) + + +def _has_default_namedtuple_repr(obj: object) -> bool: + """Check if an instance of namedtuple contains the default repr + + Args: + obj (object): A namedtuple + + Returns: + bool: True if the default repr is used, False if there's a custom repr. + """ + obj_file = None + try: + obj_file = inspect.getfile(obj.__repr__) + except (OSError, TypeError): + # OSError handles case where object is defined in __main__ scope, e.g. REPL - no filename available. + # TypeError trapped defensively, in case of object without filename slips through. + pass + default_repr_file = inspect.getfile(_dummy_namedtuple.__repr__) + return obj_file == default_repr_file + + def _ipy_display_hook( value: Any, console: Optional["Console"] = None, @@ -93,7 +115,7 @@ def _ipy_display_hook( from .console import ConsoleRenderable # needed here to prevent circular import # always skip rich generated jupyter renderables or None values - if isinstance(value, JupyterRenderable) or value is None: + if _safe_isinstance(value, JupyterRenderable) or value is None: return console = console or get_console() @@ -124,12 +146,12 @@ def _ipy_display_hook( return # Delegate rendering to IPython # certain renderables should start on a new line - if isinstance(value, ConsoleRenderable): + if _safe_isinstance(value, ConsoleRenderable): console.line() console.print( value - if isinstance(value, RichRenderable) + if _safe_isinstance(value, RichRenderable) else Pretty( value, overflow=overflow, @@ -144,6 +166,16 @@ def _ipy_display_hook( ) +def _safe_isinstance( + obj: object, class_or_tuple: Union[type, Tuple[type, ...]] +) -> bool: + """isinstance can fail in rare cases, for example types with no __class__""" + try: + return isinstance(obj, class_or_tuple) + except Exception: + return False + + def install( console: Optional["Console"] = None, overflow: "OverflowMethod" = "ignore", @@ -175,10 +207,10 @@ def display_hook(value: Any) -> None: """Replacement sys.displayhook which prettifies objects with Rich.""" if value is not None: assert console is not None - builtins._ = None # type: ignore + builtins._ = None # type: ignore[attr-defined] console.print( value - if isinstance(value, RichRenderable) + if _safe_isinstance(value, RichRenderable) else Pretty( value, overflow=overflow, @@ -189,13 +221,13 @@ def display_hook(value: Any) -> None: ), crop=crop, ) - builtins._ = value # type: ignore + builtins._ = value # type: ignore[attr-defined] try: # pragma: no cover - ip = get_ipython() # type: ignore + ip = get_ipython() # type: ignore[name-defined] from IPython.core.formatters import BaseFormatter - class RichFormatter(BaseFormatter): # type: ignore + class RichFormatter(BaseFormatter): # type: ignore[misc] pprint: bool = True def __call__(self, value: Any) -> 
Any: @@ -314,6 +346,7 @@ def __rich_measure__( indent_size=self.indent_size, max_length=self.max_length, max_string=self.max_string, + expand_all=self.expand_all, ) text_width = ( max(cell_len(line) for line in pretty_str.splitlines()) if pretty_str else 0 @@ -355,7 +388,7 @@ def _get_braces_for_array(_object: "array[Any]") -> Tuple[str, str, str]: def is_expandable(obj: Any) -> bool: """Check if an object may be expanded by pretty print.""" return ( - isinstance(obj, _CONTAINERS) + _safe_isinstance(obj, _CONTAINERS) or (is_dataclass(obj)) or (hasattr(obj, "__rich_repr__")) or _is_attr_object(obj) @@ -373,6 +406,7 @@ class Node: empty: str = "" last: bool = False is_tuple: bool = False + is_namedtuple: bool = False children: Optional[List["Node"]] = None key_separator = ": " separator: str = ", " @@ -387,7 +421,7 @@ def iter_tokens(self) -> Iterable[str]: elif self.children is not None: if self.children: yield self.open_brace - if self.is_tuple and len(self.children) == 1: + if self.is_tuple and not self.is_namedtuple and len(self.children) == 1: yield from self.children[0].iter_tokens() yield "," else: @@ -514,6 +548,25 @@ def __str__(self) -> str: ) +def _is_namedtuple(obj: Any) -> bool: + """Checks if an object is most likely a namedtuple. It is possible + to craft an object that passes this check and isn't a namedtuple, but + there is only a minuscule chance of this happening unintentionally. + + Args: + obj (Any): The object to test + + Returns: + bool: True if the object is a namedtuple. False otherwise. + """ + try: + fields = getattr(obj, "_fields", None) + except Exception: + # Being very defensive - if we cannot get the attr then its not a namedtuple + return False + return isinstance(obj, tuple) and isinstance(fields, tuple) + + def traverse( _object: Any, max_length: Optional[int] = None, @@ -539,7 +592,7 @@ def to_repr(obj: Any) -> str: """Get repr string for an object, but catch errors.""" if ( max_string is not None - and isinstance(obj, (bytes, str)) + and _safe_isinstance(obj, (bytes, str)) and len(obj) > max_string ): truncated = len(obj) - max_string @@ -565,7 +618,7 @@ def _traverse(obj: Any, root: bool = False, depth: int = 0) -> Node: def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]: for arg in rich_args: - if isinstance(arg, tuple): + if _safe_isinstance(arg, tuple): if len(arg) == 3: key, child, default = arg if default == child: @@ -622,7 +675,7 @@ def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]: last=root, ) for last, arg in loop_last(args): - if isinstance(arg, tuple): + if _safe_isinstance(arg, tuple): key, child = arg child_node = _traverse(child, depth=depth + 1) child_node.last = last @@ -689,7 +742,7 @@ def iter_attrs() -> Iterable[ elif ( is_dataclass(obj) - and not isinstance(obj, type) + and not _safe_isinstance(obj, type) and not fake_attributes and (_is_dataclass_repr(obj) or py_version == (3, 6)) ): @@ -721,10 +774,28 @@ def iter_attrs() -> Iterable[ append(child_node) pop_visited(obj_id) - - elif isinstance(obj, _CONTAINERS): + elif _is_namedtuple(obj) and _has_default_namedtuple_repr(obj): + if reached_max_depth: + node = Node(value_repr="...") + else: + children = [] + class_name = obj.__class__.__name__ + node = Node( + open_brace=f"{class_name}(", + close_brace=")", + children=children, + empty=f"{class_name}()", + ) + append = children.append + for last, (key, value) in loop_last(obj._asdict().items()): + child_node = _traverse(value, depth=depth + 1) + child_node.key_repr = key + 
child_node.last = last + child_node.key_separator = "=" + append(child_node) + elif _safe_isinstance(obj, _CONTAINERS): for container_type in _CONTAINERS: - if isinstance(obj, container_type): + if _safe_isinstance(obj, container_type): obj_type = container_type break @@ -752,7 +823,7 @@ def iter_attrs() -> Iterable[ num_items = len(obj) last_item_index = num_items - 1 - if isinstance(obj, _MAPPING_CONTAINERS): + if _safe_isinstance(obj, _MAPPING_CONTAINERS): iter_items = iter(obj.items()) if max_length is not None: iter_items = islice(iter_items, max_length) @@ -770,14 +841,15 @@ def iter_attrs() -> Iterable[ child_node.last = index == last_item_index append(child_node) if max_length is not None and num_items > max_length: - append(Node(value_repr=f"... +{num_items-max_length}", last=True)) + append(Node(value_repr=f"... +{num_items - max_length}", last=True)) else: node = Node(empty=empty, children=[], last=root) pop_visited(obj_id) else: node = Node(value_repr=to_repr(obj), last=root) - node.is_tuple = isinstance(obj, tuple) + node.is_tuple = _safe_isinstance(obj, tuple) + node.is_namedtuple = _is_namedtuple(obj) return node node = _traverse(_object, root=True) @@ -812,13 +884,13 @@ def pretty_repr( str: A possibly multi-line representation of the object. """ - if isinstance(_object, Node): + if _safe_isinstance(_object, Node): node = _object else: node = traverse( _object, max_length=max_length, max_string=max_string, max_depth=max_depth ) - repr_str = node.render( + repr_str: str = node.render( max_width=max_width, indent_size=indent_size, expand_all=expand_all ) return repr_str @@ -868,6 +940,15 @@ def __repr__(self) -> str: 1 / 0 return "this will fail" + from typing import NamedTuple + + class StockKeepingUnit(NamedTuple): + name: str + description: str + price: float + category: str + reviews: List[str] + d = defaultdict(int) d["foo"] = 5 data = { @@ -894,9 +975,16 @@ def __repr__(self) -> str: ] ), "atomic": (False, True, None), + "namedtuple": StockKeepingUnit( + "Sparkling British Spring Water", + "Carbonated spring water", + 0.9, + "water", + ["its amazing!", "its terrible!"], + ), "Broken": BrokenRepr(), } - data["foo"].append(data) # type: ignore + data["foo"].append(data) # type: ignore[attr-defined] from pipenv.patched.notpip._vendor.rich import print diff --git a/pipenv/patched/notpip/_vendor/rich/progress.py b/pipenv/patched/notpip/_vendor/rich/progress.py index 1f670db438..5c57755deb 100644 --- a/pipenv/patched/notpip/_vendor/rich/progress.py +++ b/pipenv/patched/notpip/_vendor/rich/progress.py @@ -1,28 +1,44 @@ +import io +import sys +import typing +import warnings from abc import ABC, abstractmethod from collections import deque from collections.abc import Sized from dataclasses import dataclass, field from datetime import timedelta +from io import RawIOBase, UnsupportedOperation from math import ceil +from mmap import mmap +from os import PathLike, stat from threading import Event, RLock, Thread from types import TracebackType from typing import ( Any, + BinaryIO, Callable, + ContextManager, Deque, Dict, + Generic, Iterable, List, NamedTuple, NewType, Optional, Sequence, + TextIO, Tuple, Type, TypeVar, Union, ) +if sys.version_info >= (3, 8): + from typing import Literal +else: + from pipenv.patched.notpip._vendor.typing_extensions import Literal # pragma: no cover + from . 
import filesize, get_console from .console import Console, JustifyMethod, RenderableType, Group from .highlighter import Highlighter @@ -41,6 +57,9 @@ GetTimeCallable = Callable[[], float] +_I = typing.TypeVar("_I", TextIO, BinaryIO) + + class _TrackThread(Thread): """A thread to periodically update progress.""" @@ -149,6 +168,320 @@ def track( ) +class _Reader(RawIOBase, BinaryIO): + """A reader that tracks progress while it's being read from.""" + + def __init__( + self, + handle: BinaryIO, + progress: "Progress", + task: TaskID, + close_handle: bool = True, + ) -> None: + self.handle = handle + self.progress = progress + self.task = task + self.close_handle = close_handle + self._closed = False + + def __enter__(self) -> "_Reader": + self.handle.__enter__() + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + def __iter__(self) -> BinaryIO: + return self + + def __next__(self) -> bytes: + line = next(self.handle) + self.progress.advance(self.task, advance=len(line)) + return line + + @property + def closed(self) -> bool: + return self._closed + + def fileno(self) -> int: + return self.handle.fileno() + + def isatty(self) -> bool: + return self.handle.isatty() + + def readable(self) -> bool: + return self.handle.readable() + + def seekable(self) -> bool: + return self.handle.seekable() + + def writable(self) -> bool: + return False + + def read(self, size: int = -1) -> bytes: + block = self.handle.read(size) + self.progress.advance(self.task, advance=len(block)) + return block + + def readinto(self, b: Union[bytearray, memoryview, mmap]): # type: ignore[no-untyped-def, override] + n = self.handle.readinto(b) # type: ignore[attr-defined] + self.progress.advance(self.task, advance=n) + return n + + def readline(self, size: int = -1) -> bytes: # type: ignore[override] + line = self.handle.readline(size) + self.progress.advance(self.task, advance=len(line)) + return line + + def readlines(self, hint: int = -1) -> List[bytes]: + lines = self.handle.readlines(hint) + self.progress.advance(self.task, advance=sum(map(len, lines))) + return lines + + def close(self) -> None: + if self.close_handle: + self.handle.close() + self._closed = True + + def seek(self, offset: int, whence: int = 0) -> int: + pos = self.handle.seek(offset, whence) + self.progress.update(self.task, completed=pos) + return pos + + def tell(self) -> int: + return self.handle.tell() + + def write(self, s: Any) -> int: + raise UnsupportedOperation("write") + + +class _ReadContext(ContextManager[_I], Generic[_I]): + """A utility class to handle a context for both a reader and a progress.""" + + def __init__(self, progress: "Progress", reader: _I) -> None: + self.progress = progress + self.reader: _I = reader + + def __enter__(self) -> _I: + self.progress.start() + return self.reader.__enter__() + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.progress.stop() + self.reader.__exit__(exc_type, exc_val, exc_tb) + + +def wrap_file( + file: BinaryIO, + total: int, + *, + description: str = "Reading...", + auto_refresh: bool = True, + console: Optional[Console] = None, + transient: bool = False, + get_time: Optional[Callable[[], float]] = None, + refresh_per_second: float = 10, + style: StyleType = "bar.back", + complete_style: StyleType = "bar.complete", + finished_style: StyleType = 
"bar.finished", + pulse_style: StyleType = "bar.pulse", + disable: bool = False, +) -> ContextManager[BinaryIO]: + """Read bytes from a file while tracking progress. + + Args: + file (Union[str, PathLike[str], BinaryIO]): The path to the file to read, or a file-like object in binary mode. + total (int): Total number of bytes to read. + description (str, optional): Description of task show next to progress bar. Defaults to "Reading". + auto_refresh (bool, optional): Automatic refresh, disable to force a refresh after each iteration. Default is True. + transient: (bool, optional): Clear the progress on exit. Defaults to False. + console (Console, optional): Console to write to. Default creates internal Console instance. + refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10. + style (StyleType, optional): Style for the bar background. Defaults to "bar.back". + complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete". + finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done". + pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse". + disable (bool, optional): Disable display of progress. + Returns: + ContextManager[BinaryIO]: A context manager yielding a progress reader. + + """ + + columns: List["ProgressColumn"] = ( + [TextColumn("[progress.description]{task.description}")] if description else [] + ) + columns.extend( + ( + BarColumn( + style=style, + complete_style=complete_style, + finished_style=finished_style, + pulse_style=pulse_style, + ), + DownloadColumn(), + TimeRemainingColumn(), + ) + ) + progress = Progress( + *columns, + auto_refresh=auto_refresh, + console=console, + transient=transient, + get_time=get_time, + refresh_per_second=refresh_per_second or 10, + disable=disable, + ) + + reader = progress.wrap_file(file, total=total, description=description) + return _ReadContext(progress, reader) + + +@typing.overload +def open( + file: Union[str, "PathLike[str]", bytes], + mode: Union[Literal["rt"], Literal["r"]], + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + *, + total: Optional[int] = None, + description: str = "Reading...", + auto_refresh: bool = True, + console: Optional[Console] = None, + transient: bool = False, + get_time: Optional[Callable[[], float]] = None, + refresh_per_second: float = 10, + style: StyleType = "bar.back", + complete_style: StyleType = "bar.complete", + finished_style: StyleType = "bar.finished", + pulse_style: StyleType = "bar.pulse", + disable: bool = False, +) -> ContextManager[TextIO]: + pass + + +@typing.overload +def open( + file: Union[str, "PathLike[str]", bytes], + mode: Literal["rb"], + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + *, + total: Optional[int] = None, + description: str = "Reading...", + auto_refresh: bool = True, + console: Optional[Console] = None, + transient: bool = False, + get_time: Optional[Callable[[], float]] = None, + refresh_per_second: float = 10, + style: StyleType = "bar.back", + complete_style: StyleType = "bar.complete", + finished_style: StyleType = "bar.finished", + pulse_style: StyleType = "bar.pulse", + disable: bool = False, +) -> ContextManager[BinaryIO]: + pass + + +def open( + file: Union[str, "PathLike[str]", bytes], + mode: Union[Literal["rb"], Literal["rt"], Literal["r"]] = "r", + buffering: int = -1, + 
encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + *, + total: Optional[int] = None, + description: str = "Reading...", + auto_refresh: bool = True, + console: Optional[Console] = None, + transient: bool = False, + get_time: Optional[Callable[[], float]] = None, + refresh_per_second: float = 10, + style: StyleType = "bar.back", + complete_style: StyleType = "bar.complete", + finished_style: StyleType = "bar.finished", + pulse_style: StyleType = "bar.pulse", + disable: bool = False, +) -> Union[ContextManager[BinaryIO], ContextManager[TextIO]]: + """Read bytes from a file while tracking progress. + + Args: + file (Union[str, PathLike[str], BinaryIO]): The path to the file to read, or a file-like object in binary mode. + mode (str): The mode to use to open the file. Only supports "r", "rb" or "rt". + buffering (int): The buffering strategy to use, see :func:`io.open`. + encoding (str, optional): The encoding to use when reading in text mode, see :func:`io.open`. + errors (str, optional): The error handling strategy for decoding errors, see :func:`io.open`. + newline (str, optional): The strategy for handling newlines in text mode, see :func:`io.open`. + total (int, optional): Total number of bytes to read. Must be provided if reading from a file handle. Default for a path is os.stat(file).st_size. + description (str, optional): Description of the task shown next to the progress bar. Defaults to "Reading...". + auto_refresh (bool, optional): Automatic refresh, disable to force a refresh after each iteration. Default is True. + transient (bool, optional): Clear the progress on exit. Defaults to False. + console (Console, optional): Console to write to. Default creates internal Console instance. + refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10. + style (StyleType, optional): Style for the bar background. Defaults to "bar.back". + complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete". + finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished". + pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse". + disable (bool, optional): Disable display of progress. + + Returns: + Union[ContextManager[BinaryIO], ContextManager[TextIO]]: A context manager yielding a progress reader. + + """ + + columns: List["ProgressColumn"] = ( + [TextColumn("[progress.description]{task.description}")] if description else [] + ) + columns.extend( + ( + BarColumn( + style=style, + complete_style=complete_style, + finished_style=finished_style, + pulse_style=pulse_style, + ), + DownloadColumn(), + TimeRemainingColumn(), + ) + ) + progress = Progress( + *columns, + auto_refresh=auto_refresh, + console=console, + transient=transient, + get_time=get_time, + refresh_per_second=refresh_per_second or 10, + disable=disable, + ) + + reader = progress.open( + file, + mode=mode, + buffering=buffering, + encoding=encoding, + errors=errors, + newline=newline, + total=total, + description=description, + ) + return _ReadContext(progress, reader) # type: ignore[return-value, type-var] + + class ProgressColumn(ABC): """Base class for a widget to use in progress display.""" @@ -343,18 +676,48 @@ class TimeRemainingColumn(ProgressColumn): - """Renders estimated time remaining.""" + """Renders estimated time remaining.
+ + Args: + compact (bool, optional): Render MM:SS when time remaining is less than an hour. Defaults to False. + elapsed_when_finished (bool, optional): Render time elapsed when the task is finished. Defaults to False. + """ # Only refresh twice a second to prevent jitter max_refresh = 0.5 + def __init__( + self, + compact: bool = False, + elapsed_when_finished: bool = False, + table_column: Optional[Column] = None, + ): + self.compact = compact + self.elapsed_when_finished = elapsed_when_finished + super().__init__(table_column=table_column) + def render(self, task: "Task") -> Text: """Show time remaining.""" - remaining = task.time_remaining - if remaining is None: - return Text("-:--:--", style="progress.remaining") - remaining_delta = timedelta(seconds=int(remaining)) - return Text(str(remaining_delta), style="progress.remaining") + if self.elapsed_when_finished and task.finished: + task_time = task.finished_time + style = "progress.elapsed" + else: + task_time = task.time_remaining + style = "progress.remaining" + + if task_time is None: + return Text("--:--" if self.compact else "-:--:--", style=style) + + # Based on https://github.com/tqdm/tqdm/blob/master/tqdm/std.py + minutes, seconds = divmod(int(task_time), 60) + hours, minutes = divmod(minutes, 60) + + if self.compact and not hours: + formatted = f"{minutes:02d}:{seconds:02d}" + else: + formatted = f"{hours:d}:{minutes:02d}:{seconds:02d}" + + return Text(formatted, style=style) class FileSizeColumn(ProgressColumn): @@ -375,6 +738,33 @@ def render(self, task: "Task") -> Text: return Text(data_size, style="progress.filesize.total") +class MofNCompleteColumn(ProgressColumn): + """Renders completed count/total, e.g. ' 10/1000'. + + Best for bounded tasks with int quantities. + + Space pads the completed count so that progress length does not change as task progresses + past powers of 10. + + Args: + separator (str, optional): Text to separate completed and total values. Defaults to "/". + """ + + def __init__(self, separator: str = "/", table_column: Optional[Column] = None): + self.separator = separator + super().__init__(table_column=table_column) + + def render(self, task: "Task") -> Text: + """Show completed/total.""" + completed = int(task.completed) + total = int(task.total) + total_width = len(str(total)) + return Text( + f"{completed:{total_width}d}{self.separator}{total}", + style="progress.download", + ) + + class DownloadColumn(ProgressColumn): """Renders file size downloaded and total, e.g. '0.5/2.3 GB'. 
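A minimal usage sketch of the two column additions above, combining the new TimeRemainingColumn options with MofNCompleteColumn (illustrative only, not part of the vendored diff; it assumes a rich release containing these additions and uses plain `rich` import paths rather than the `pipenv.patched.notpip._vendor` ones):

import time

from rich.progress import (
    BarColumn,
    MofNCompleteColumn,
    Progress,
    TextColumn,
    TimeRemainingColumn,
)

progress = Progress(
    TextColumn("[progress.description]{task.description}"),
    BarColumn(),
    MofNCompleteColumn(),  # renders a space-padded ' 42/100' style count
    # compact=True renders MM:SS while less than an hour remains;
    # elapsed_when_finished=True switches to elapsed time at 100%.
    TimeRemainingColumn(compact=True, elapsed_when_finished=True),
)

with progress:
    task = progress.add_task("Working", total=100)
    for _ in range(100):
        time.sleep(0.02)
        progress.advance(task)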
@@ -400,7 +790,9 @@ def render(self, task: "Task") -> Text: ) else: unit, suffix = filesize.pick_unit_and_suffix( - total, ["bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"], 1000 + total, + ["bytes", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"], + 1000, ) completed_ratio = completed / unit total_ratio = total / unit @@ -475,7 +867,7 @@ class Task: """Optional[float]: The last speed for a finished task.""" _progress: Deque[ProgressSample] = field( - default_factory=deque, init=False, repr=False + default_factory=lambda: deque(maxlen=1000), init=False, repr=False ) _lock: RLock = field(repr=False, default_factory=RLock) @@ -588,12 +980,7 @@ def __init__( refresh_per_second is None or refresh_per_second > 0 ), "refresh_per_second must be > 0" self._lock = RLock() - self.columns = columns or ( - TextColumn("[progress.description]{task.description}"), - BarColumn(), - TextColumn("[progress.percentage]{task.percentage:>3.0f}%"), - TimeRemainingColumn(), - ) + self.columns = columns or self.get_default_columns() self.speed_estimate_period = speed_estimate_period self.disable = disable @@ -613,6 +1000,37 @@ def __init__( self.print = self.console.print self.log = self.console.log + @classmethod + def get_default_columns(cls) -> Tuple[ProgressColumn, ...]: + """Get the default columns used for a new Progress instance: + - a text column for the description (TextColumn) + - the bar itself (BarColumn) + - a text column showing completion percentage (TextColumn) + - an estimated-time-remaining column (TimeRemainingColumn) + If the Progress instance is created without passing a columns argument, + the default columns defined here will be used. + + You can also create a Progress instance using custom columns before + and/or after the defaults, as in this example: + + progress = Progress( + SpinnerColumn(), + *Progress.get_default_columns(), + "Elapsed:", + TimeElapsedColumn(), + ) + + This code shows the creation of a Progress display, containing + a spinner to the left, the default columns, and a labeled elapsed + time column. + """ + return ( + TextColumn("[progress.description]{task.description}"), + BarColumn(), + TextColumn("[progress.percentage]{task.percentage:>3.0f}%"), + TimeRemainingColumn(), + ) + @property def console(self) -> Console: return self.live.console @@ -709,6 +1127,157 @@ def track( advance(task_id, 1) refresh() + def wrap_file( + self, + file: BinaryIO, + total: Optional[int] = None, + *, + task_id: Optional[TaskID] = None, + description: str = "Reading...", + ) -> BinaryIO: + """Track progress while reading from a binary file. + + Args: + file (BinaryIO): A file-like object opened in binary mode. + total (int, optional): Total number of bytes to read. This must be provided unless a task with a total is also given. + task_id (TaskID): Task to track. Default is new task. + description (str, optional): Description of task, if new task is created. + + Returns: + BinaryIO: A readable file-like object in binary mode. + + Raises: + ValueError: When no total value can be extracted from the arguments or the task.
+ """ + # attempt to recover the total from the task + total_bytes: Optional[float] = None + if total is not None: + total_bytes = total + elif task_id is not None: + with self._lock: + total_bytes = self._tasks[task_id].total + if total_bytes is None: + raise ValueError( + f"unable to get the total number of bytes, please specify 'total'" + ) + + # update total of task or create new task + if task_id is None: + task_id = self.add_task(description, total=total_bytes) + else: + self.update(task_id, total=total_bytes) + + return _Reader(file, self, task_id, close_handle=False) + + @typing.overload + def open( + self, + file: Union[str, "PathLike[str]", bytes], + mode: Literal["rb"], + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + *, + total: Optional[int] = None, + task_id: Optional[TaskID] = None, + description: str = "Reading...", + ) -> BinaryIO: + pass + + @typing.overload + def open( + self, + file: Union[str, "PathLike[str]", bytes], + mode: Union[Literal["r"], Literal["rt"]], + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + *, + total: Optional[int] = None, + task_id: Optional[TaskID] = None, + description: str = "Reading...", + ) -> TextIO: + pass + + def open( + self, + file: Union[str, "PathLike[str]", bytes], + mode: Union[Literal["rb"], Literal["rt"], Literal["r"]] = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + *, + total: Optional[int] = None, + task_id: Optional[TaskID] = None, + description: str = "Reading...", + ) -> Union[BinaryIO, TextIO]: + """Track progress while reading from a binary file. + + Args: + path (Union[str, PathLike[str]]): The path to the file to read. + mode (str): The mode to use to open the file. Only supports "r", "rb" or "rt". + buffering (int): The buffering strategy to use, see :func:`io.open`. + encoding (str, optional): The encoding to use when reading in text mode, see :func:`io.open`. + errors (str, optional): The error handling strategy for decoding errors, see :func:`io.open`. + newline (str, optional): The strategy for handling newlines in text mode, see :func:`io.open`. + total (int, optional): Total number of bytes to read. If none given, os.stat(path).st_size is used. + task_id (TaskID): Task to track. Default is new task. + description (str, optional): Description of task, if new task is created. + + Returns: + BinaryIO: A readable file-like object in binary mode. + + Raises: + ValueError: When an invalid mode is given. 
+ """ + # normalize the mode (always rb, rt) + _mode = "".join(sorted(mode, reverse=False)) + if _mode not in ("br", "rt", "r"): + raise ValueError("invalid mode {!r}".format(mode)) + + # patch buffering to provide the same behaviour as the builtin `open` + line_buffering = buffering == 1 + if _mode == "br" and buffering == 1: + warnings.warn( + "line buffering (buffering=1) isn't supported in binary mode, the default buffer size will be used", + RuntimeWarning, + ) + buffering = -1 + elif _mode == "rt" or _mode == "r": + if buffering == 0: + raise ValueError("can't have unbuffered text I/O") + elif buffering == 1: + buffering = -1 + + # attempt to get the total with `os.stat` + if total is None: + total = stat(file).st_size + + # update total of task or create new task + if task_id is None: + task_id = self.add_task(description, total=total) + else: + self.update(task_id, total=total) + + # open the file in binary mode, + handle = io.open(file, "rb", buffering=buffering) + reader = _Reader(handle, self, task_id, close_handle=True) + + # wrap the reader in a `TextIOWrapper` if text mode + if mode == "r" or mode == "rt": + return io.TextIOWrapper( + reader, + encoding=encoding, + errors=errors, + newline=newline, + line_buffering=line_buffering, + ) + + return reader + def start_task(self, task_id: TaskID) -> None: """Start a task. @@ -787,8 +1356,6 @@ def update( popleft = _progress.popleft while _progress and _progress[0].timestamp < old_sample_time: popleft() - while len(_progress) > 1000: - popleft() if update_completed > 0: _progress.append(ProgressSample(current_time, update_completed)) if task.completed >= task.total and task.finished_time is None: @@ -1015,10 +1582,7 @@ def remove_task(self, task_id: TaskID) -> None: with Progress( SpinnerColumn(), - TextColumn("[progress.description]{task.description}"), - BarColumn(), - TextColumn("[progress.percentage]{task.percentage:>3.0f}%"), - TimeRemainingColumn(), + *Progress.get_default_columns(), TimeElapsedColumn(), console=console, transient=True, diff --git a/pipenv/patched/notpip/_vendor/rich/prompt.py b/pipenv/patched/notpip/_vendor/rich/prompt.py index db688480fa..bbc6880f17 100644 --- a/pipenv/patched/notpip/_vendor/rich/prompt.py +++ b/pipenv/patched/notpip/_vendor/rich/prompt.py @@ -228,14 +228,14 @@ def process_response(self, value: str) -> PromptType: """ value = value.strip() try: - return_value = self.response_type(value) + return_value: PromptType = self.response_type(value) except ValueError: raise InvalidResponse(self.validate_error_message) if self.choices is not None and not self.check_choice(value): raise InvalidResponse(self.illegal_choice_message) - return return_value # type: ignore + return return_value def on_validate_error(self, value: str, error: InvalidResponse) -> None: """Called to handle validation error. 
diff --git a/pipenv/patched/notpip/_vendor/rich/protocol.py b/pipenv/patched/notpip/_vendor/rich/protocol.py index f1f56fd65a..d1f8182233 100644 --- a/pipenv/patched/notpip/_vendor/rich/protocol.py +++ b/pipenv/patched/notpip/_vendor/rich/protocol.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, cast, Set, TYPE_CHECKING +from typing import Any, cast, Set, TYPE_CHECKING from inspect import isclass if TYPE_CHECKING: diff --git a/pipenv/patched/notpip/_vendor/rich/repr.py b/pipenv/patched/notpip/_vendor/rich/repr.py index 6facd32335..c4bd1a902b 100644 --- a/pipenv/patched/notpip/_vendor/rich/repr.py +++ b/pipenv/patched/notpip/_vendor/rich/repr.py @@ -1,5 +1,6 @@ from functools import partial import inspect +import sys from typing import ( Any, @@ -27,28 +28,28 @@ class ReprError(Exception): @overload -def auto(cls: Optional[T]) -> T: +def auto(cls: Optional[Type[T]]) -> Type[T]: ... @overload -def auto(*, angular: bool = False) -> Callable[[T], T]: +def auto(*, angular: bool = False) -> Callable[[Type[T]], Type[T]]: ... def auto( - cls: Optional[T] = None, *, angular: Optional[bool] = None -) -> Union[T, Callable[[T], T]]: + cls: Optional[Type[T]] = None, *, angular: Optional[bool] = None +) -> Union[Type[T], Callable[[Type[T]], Type[T]]]: """Class decorator to create __repr__ from __rich_repr__""" def do_replace(cls: Type[T], angular: Optional[bool] = None) -> Type[T]: - def auto_repr(self: Type[T]) -> str: + def auto_repr(self: T) -> str: """Create repr string from __rich_repr__""" repr_str: List[str] = [] append = repr_str.append - angular = getattr(self.__rich_repr__, "angular", False) # type: ignore - for arg in self.__rich_repr__(): # type: ignore + angular: bool = getattr(self.__rich_repr__, "angular", False) # type: ignore[attr-defined] + for arg in self.__rich_repr__(): # type: ignore[attr-defined] if isinstance(arg, tuple): if len(arg) == 1: append(repr(arg[0])) @@ -70,7 +71,7 @@ def auto_repr(self: Type[T]) -> str: def auto_rich_repr(self: Type[T]) -> Result: """Auto generate __rich_rep__ from signature of __init__""" try: - signature = inspect.signature(self.__init__) ## type: ignore + signature = inspect.signature(self.__init__) for name, param in signature.parameters.items(): if param.kind == param.POSITIONAL_ONLY: yield getattr(self, name) @@ -89,33 +90,33 @@ def auto_rich_repr(self: Type[T]) -> Result: if not hasattr(cls, "__rich_repr__"): auto_rich_repr.__doc__ = "Build a rich repr" - cls.__rich_repr__ = auto_rich_repr # type: ignore + cls.__rich_repr__ = auto_rich_repr # type: ignore[attr-defined] auto_repr.__doc__ = "Return repr(self)" - cls.__repr__ = auto_repr # type: ignore + cls.__repr__ = auto_repr # type: ignore[assignment] if angular is not None: - cls.__rich_repr__.angular = angular # type: ignore + cls.__rich_repr__.angular = angular # type: ignore[attr-defined] return cls if cls is None: - return partial(do_replace, angular=angular) # type: ignore + return partial(do_replace, angular=angular) else: - return do_replace(cls, angular=angular) # type: ignore + return do_replace(cls, angular=angular) @overload -def rich_repr(cls: Optional[T]) -> T: +def rich_repr(cls: Optional[Type[T]]) -> Type[T]: ... @overload -def rich_repr(*, angular: bool = False) -> Callable[[T], T]: +def rich_repr(*, angular: bool = False) -> Callable[[Type[T]], Type[T]]: ... 
def rich_repr( - cls: Optional[T] = None, *, angular: bool = False -) -> Union[T, Callable[[T], T]]: + cls: Optional[Type[T]] = None, *, angular: bool = False +) -> Union[Type[T], Callable[[Type[T]], Type[T]]]: if cls is None: return auto(angular=angular) else: @@ -143,7 +144,7 @@ def __rich_repr__(self) -> Result: console.print(foo, width=30) console.rule("Angular repr") - Foo.__rich_repr__.angular = True # type: ignore + Foo.__rich_repr__.angular = True # type: ignore[attr-defined] console.print(foo) diff --git a/pipenv/patched/notpip/_vendor/rich/segment.py b/pipenv/patched/notpip/_vendor/rich/segment.py index 0c8a3ba08a..6b7f0306c1 100644 --- a/pipenv/patched/notpip/_vendor/rich/segment.py +++ b/pipenv/patched/notpip/_vendor/rich/segment.py @@ -64,15 +64,25 @@ class Segment(NamedTuple): Args: text (str): A piece of text. style (:class:`~rich.style.Style`, optional): An optional style to apply to the text. - control (Tuple[ControlCode..], optional): Optional sequence of control codes. + control (Tuple[ControlCode], optional): Optional sequence of control codes. + + Attributes: + cell_length (int): The cell length of this Segment. """ - text: str = "" - """Raw text.""" + text: str style: Optional[Style] = None - """An optional style.""" control: Optional[Sequence[ControlCode]] = None - """Optional sequence of control codes.""" + + @property + def cell_length(self) -> int: + """The number of terminal cells required to display self.text. + + Returns: + int: A number of cells. + """ + text, _style, control = self + return 0 if control else cell_len(text) def __rich_repr__(self) -> Result: yield self.text @@ -87,11 +97,6 @@ def __bool__(self) -> bool: """Check if the segment contains text.""" return bool(self.text) - @property - def cell_length(self) -> int: - """Get cell length of segment.""" - return 0 if self.control else cell_len(self.text) - @property def is_control(self) -> bool: """Check if the segment contains control codes.""" @@ -99,7 +104,7 @@ def is_control(self) -> bool: @classmethod @lru_cache(1024 * 16) - def _split_cells(cls, segment: "Segment", cut: int) -> Tuple["Segment", "Segment"]: # type: ignore + def _split_cells(cls, segment: "Segment", cut: int) -> Tuple["Segment", "Segment"]: text, style, control = segment _Segment = Segment @@ -135,6 +140,8 @@ def _split_cells(cls, segment: "Segment", cut: int) -> Tuple["Segment", "Segment _Segment(" " + text[pos:], style, control), ) + raise AssertionError("Will never reach here") + def split_cells(self, cut: int) -> Tuple["Segment", "Segment"]: """Split segment in to two segments at the specified column. 
@@ -682,39 +689,35 @@ def __rich_console__( yield from line -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover + from pipenv.patched.notpip._vendor.rich.console import Console + from pipenv.patched.notpip._vendor.rich.syntax import Syntax + from pipenv.patched.notpip._vendor.rich.text import Text - if __name__ == "__main__": # pragma: no cover - from pipenv.patched.notpip._vendor.rich.console import Console - from pipenv.patched.notpip._vendor.rich.syntax import Syntax - from pipenv.patched.notpip._vendor.rich.text import Text + code = """from rich.console import Console +console = Console() +text = Text.from_markup("Hello, [bold magenta]World[/]!") +console.print(text)""" - code = """from rich.console import Console - console = Console() text = Text.from_markup("Hello, [bold magenta]World[/]!") - console.print(text)""" - text = Text.from_markup("Hello, [bold magenta]World[/]!") - - console = Console() + console = Console() - console.rule("rich.Segment") - console.print( - "A Segment is the last step in the Rich render process before generating text with ANSI codes." - ) - console.print("\nConsider the following code:\n") - console.print(Syntax(code, "python", line_numbers=True)) - console.print() - console.print( - "When you call [b]print()[/b], Rich [i]renders[/i] the object in to the the following:\n" - ) - fragments = list(console.render(text)) - console.print(fragments) - console.print() - console.print( - "The Segments are then processed to produce the following output:\n" - ) - console.print(text) - console.print( - "\nYou will only need to know this if you are implementing your own Rich renderables." - ) + console.rule("rich.Segment") + console.print( + "A Segment is the last step in the Rich render process before generating text with ANSI codes." + ) + console.print("\nConsider the following code:\n") + console.print(Syntax(code, "python", line_numbers=True)) + console.print() + console.print( + "When you call [b]print()[/b], Rich [i]renders[/i] the object into the following:\n" + ) + fragments = list(console.render(text)) + console.print(fragments) + console.print() + console.print("The Segments are then processed to produce the following output:\n") + console.print(text) + console.print( + "\nYou will only need to know this if you are implementing your own Rich renderables." + ) diff --git a/pipenv/patched/notpip/_vendor/rich/syntax.py b/pipenv/patched/notpip/_vendor/rich/syntax.py index 41b9c3b58d..d26879bcad 100644 --- a/pipenv/patched/notpip/_vendor/rich/syntax.py +++ b/pipenv/patched/notpip/_vendor/rich/syntax.py @@ -1,6 +1,5 @@ import os.path import platform -from pipenv.patched.notpip._vendor.rich.containers import Lines import textwrap from abc import ABC, abstractmethod from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Type, Union @@ -23,6 +22,8 @@ ) from pipenv.patched.notpip._vendor.pygments.util import ClassNotFound +from pipenv.patched.notpip._vendor.rich.containers import Lines + from ._loop import loop_first from .color import Color, blend_rgb from .console import Console, ConsoleOptions, JustifyMethod, RenderResult @@ -200,7 +201,8 @@ class Syntax(JupyterMixin): dedent (bool, optional): Enable stripping of initial whitespace. Defaults to False. line_numbers (bool, optional): Enable rendering of line numbers. Defaults to False. start_line (int, optional): Starting number for line numbers. Defaults to 1. - line_range (Tuple[int, int], optional): If given should be a tuple of the start and end line to render.
+ line_range (Tuple[int | None, int | None], optional): If given, should be a tuple of the start and end line to render. + A value of None in the tuple indicates the range is open in that direction. highlight_lines (Set[int]): A set of line numbers to highlight. code_width: Width of code to render (not including line numbers), or ``None`` to use all available width. tab_size (int, optional): Size of tabs. Defaults to 4. @@ -233,7 +235,7 @@ def __init__( dedent: bool = False, line_numbers: bool = False, start_line: int = 1, - line_range: Optional[Tuple[int, int]] = None, + line_range: Optional[Tuple[Optional[int], Optional[int]]] = None, highlight_lines: Optional[Set[int]] = None, code_width: Optional[int] = None, tab_size: int = 4, @@ -264,6 +266,7 @@ def from_path( cls, path: str, encoding: str = "utf-8", + lexer: Optional[Union[Lexer, str]] = None, theme: Union[str, SyntaxTheme] = DEFAULT_THEME, dedent: bool = False, line_numbers: bool = False, @@ -281,6 +284,7 @@ def from_path( Args: path (str): Path to file to highlight. encoding (str): Encoding of file. + lexer (str | Lexer, optional): Lexer to use. If None, lexer will be auto-detected from path/file content. theme (str, optional): Color theme, aka Pygments style (see https://pygments.org/docs/styles/#getting-a-list-of-available-styles). Defaults to "emacs". dedent (bool, optional): Enable stripping of initial whitespace. Defaults to True. line_numbers (bool, optional): Enable rendering of line numbers. Defaults to False. @@ -299,26 +303,12 @@ def from_path( with open(path, "rt", encoding=encoding) as code_file: code = code_file.read() - lexer = None - lexer_name = "default" - try: - _, ext = os.path.splitext(path) - if ext: - extension = ext.lstrip(".").lower() - lexer = get_lexer_by_name(extension) - lexer_name = lexer.name - except ClassNotFound: - pass - - if lexer is None: - try: - lexer_name = guess_lexer_for_filename(path, code).name - except ClassNotFound: - pass + if not lexer: + lexer = cls.guess_lexer(path, code=code) return cls( code, - lexer_name, + lexer, theme=theme, dedent=dedent, line_numbers=line_numbers, @@ -332,6 +322,48 @@ def from_path( indent_guides=indent_guides, ) + @classmethod + def guess_lexer(cls, path: str, code: Optional[str] = None) -> str: + """Guess the alias of the Pygments lexer to use based on a path and an optional string of code. + If code is supplied, it will use a combination of the code and the filename to determine the + best lexer to use. For example, if the file is ``index.html`` and the file contains Django + templating syntax, then "html+django" will be returned. If the file is ``index.html``, and no + templating language is used, the "html" lexer will be used. If no string of code + is supplied, the lexer will be chosen based on the file extension. + + Args: + path (AnyStr): The path to the file containing the code you wish to know the lexer for. + code (str, optional): Optional string of code that will be used, together with the path, + to determine the best lexer. + + Returns: + str: The name of the Pygments lexer that best matches the supplied path/code.
+ """ + lexer: Optional[Lexer] = None + lexer_name = "default" + if code: + try: + lexer = guess_lexer_for_filename(path, code) + except ClassNotFound: + pass + + if not lexer: + try: + _, ext = os.path.splitext(path) + if ext: + extension = ext.lstrip(".").lower() + lexer = get_lexer_by_name(extension) + except ClassNotFound: + pass + + if lexer: + if lexer.aliases: + lexer_name = lexer.aliases[0] + else: + lexer_name = lexer.name + + return lexer_name + def _get_base_style(self) -> Style: """Get the base style.""" default_style = self._theme.get_background_style() + self.background_style @@ -369,7 +401,9 @@ def lexer(self) -> Optional[Lexer]: return None def highlight( - self, code: str, line_range: Optional[Tuple[int, int]] = None + self, + code: str, + line_range: Optional[Tuple[Optional[int], Optional[int]]] = None, ) -> Text: """Highlight code and return a Text instance. @@ -417,7 +451,7 @@ def tokens_to_spans() -> Iterable[Tuple[str, Optional[Style]]]: """Convert tokens to spans.""" tokens = iter(line_tokenize()) line_no = 0 - _line_start = line_start - 1 + _line_start = line_start - 1 if line_start else 0 # Skip over tokens until line start while line_no < _line_start: @@ -430,7 +464,7 @@ def tokens_to_spans() -> Iterable[Tuple[str, Optional[Style]]]: yield (token, _get_theme_style(token_type)) if token.endswith("\n"): line_no += 1 - if line_no >= line_end: + if line_end and line_no >= line_end: break text.append_tokens(tokens_to_spans()) @@ -513,11 +547,6 @@ def __rich_console__( else self.code_width ) - line_offset = 0 - if self.line_range: - start_line, end_line = self.line_range - line_offset = max(0, start_line - 1) - ends_on_nl = self.code.endswith("\n") code = self.code if ends_on_nl else self.code + "\n" code = textwrap.dedent(code) if self.dedent else code @@ -550,7 +579,7 @@ def __rich_console__( else: syntax_lines = console.render_lines( text, - options.update(width=code_width, height=None), + options.update(width=code_width, height=None, justify="left"), style=self.background_style, pad=True, new_lines=True, @@ -559,6 +588,10 @@ def __rich_console__( yield from syntax_line return + start_line, end_line = self.line_range or (None, None) + line_offset = 0 + if start_line: + line_offset = max(0, start_line - 1) lines: Union[List[Text], Lines] = text.split("\n", allow_blank=ends_on_nl) if self.line_range: lines = lines[line_offset:end_line] @@ -591,7 +624,7 @@ def __rich_console__( if self.word_wrap: wrapped_lines = console.render_lines( line, - render_options.update(height=None), + render_options.update(height=None, justify="left"), style=background_style, pad=not transparent_background, ) @@ -702,7 +735,7 @@ def __rich_console__( parser.add_argument( "-x", "--lexer", - default="default", + default=None, dest="lexer_name", help="Lexer name", ) @@ -726,6 +759,7 @@ def __rich_console__( else: syntax = Syntax.from_path( args.path, + lexer=args.lexer_name, line_numbers=args.line_numbers, word_wrap=args.word_wrap, theme=args.theme, diff --git a/pipenv/patched/notpip/_vendor/rich/table.py b/pipenv/patched/notpip/_vendor/rich/table.py index 4f6063b424..580dfee05d 100644 --- a/pipenv/patched/notpip/_vendor/rich/table.py +++ b/pipenv/patched/notpip/_vendor/rich/table.py @@ -37,7 +37,35 @@ @dataclass class Column: - """Defines a column in a table.""" + """Defines a column within a ~Table. + + Args: + title (Union[str, Text], optional): The title of the table rendered at the top. Defaults to None. + caption (Union[str, Text], optional): The table caption rendered below. 
Defaults to None. + width (int, optional): The width in characters of the table, or ``None`` to automatically fit. Defaults to None. + min_width (Optional[int], optional): The minimum width of the table, or ``None`` for no minimum. Defaults to None. + box (box.Box, optional): One of the constants in box.py used to draw the edges (see :ref:`appendix_box`), or ``None`` for no box lines. Defaults to box.HEAVY_HEAD. + safe_box (Optional[bool], optional): Disable box characters that don't display on windows legacy terminal with *raster* fonts. Defaults to True. + padding (PaddingDimensions, optional): Padding for cells (top, right, bottom, left). Defaults to (0, 1). + collapse_padding (bool, optional): Enable collapsing of padding around cells. Defaults to False. + pad_edge (bool, optional): Enable padding of edge cells. Defaults to True. + expand (bool, optional): Expand the table to fit the available space if ``True``, otherwise the table width will be auto-calculated. Defaults to False. + show_header (bool, optional): Show a header row. Defaults to True. + show_footer (bool, optional): Show a footer row. Defaults to False. + show_edge (bool, optional): Draw a box around the outside of the table. Defaults to True. + show_lines (bool, optional): Draw lines between every row. Defaults to False. + leading (bool, optional): Number of blank lines between rows (precludes ``show_lines``). Defaults to 0. + style (Union[str, Style], optional): Default style for the table. Defaults to "none". + row_styles (List[Union, str], optional): Optional list of row styles, if more than one style is given then the styles will alternate. Defaults to None. + header_style (Union[str, Style], optional): Style of the header. Defaults to "table.header". + footer_style (Union[str, Style], optional): Style of the footer. Defaults to "table.footer". + border_style (Union[str, Style], optional): Style of the border. Defaults to None. + title_style (Union[str, Style], optional): Style of the title. Defaults to None. + caption_style (Union[str, Style], optional): Style of the caption. Defaults to None. + title_justify (str, optional): Justify method for title. Defaults to "center". + caption_justify (str, optional): Justify method for caption. Defaults to "center". + highlight (bool, optional): Highlight cell contents (if str). Defaults to False. + """ header: "RenderableType" = "" """RenderableType: Renderable for the header (typically a string)""" diff --git a/pipenv/patched/notpip/_vendor/rich/tabulate.py b/pipenv/patched/notpip/_vendor/rich/tabulate.py deleted file mode 100644 index 9384ec4301..0000000000 --- a/pipenv/patched/notpip/_vendor/rich/tabulate.py +++ /dev/null @@ -1,51 +0,0 @@ -from collections.abc import Mapping -from typing import Any, Optional -import warnings - -from pipenv.patched.notpip._vendor.rich.console import JustifyMethod - -from . import box -from .highlighter import ReprHighlighter -from .pretty import Pretty -from .table import Table - - -def tabulate_mapping( - mapping: "Mapping[Any, Any]", - title: Optional[str] = None, - caption: Optional[str] = None, - title_justify: Optional[JustifyMethod] = None, - caption_justify: Optional[JustifyMethod] = None, -) -> Table: - """Generate a simple table from a mapping. - - Args: - mapping (Mapping): A mapping object (e.g. a dict); - title (str, optional): Optional title to be displayed over the table. - caption (str, optional): Optional caption to be displayed below the table. - title_justify (str, optional): Justify method for title. Defaults to None. 
- caption_justify (str, optional): Justify method for caption. Defaults to None. - - Returns: - Table: A table instance which may be rendered by the Console. - """ - warnings.warn("tabulate_mapping will be deprecated in Rich v11", DeprecationWarning) - table = Table( - show_header=False, - title=title, - caption=caption, - box=box.ROUNDED, - border_style="blue", - ) - table.title = title - table.caption = caption - if title_justify is not None: - table.title_justify = title_justify - if caption_justify is not None: - table.caption_justify = caption_justify - highlighter = ReprHighlighter() - for key, value in mapping.items(): - table.add_row( - Pretty(key, highlighter=highlighter), Pretty(value, highlighter=highlighter) - ) - return table diff --git a/pipenv/patched/notpip/_vendor/rich/terminal_theme.py b/pipenv/patched/notpip/_vendor/rich/terminal_theme.py index 801ac0b7b8..ace8e93def 100644 --- a/pipenv/patched/notpip/_vendor/rich/terminal_theme.py +++ b/pipenv/patched/notpip/_vendor/rich/terminal_theme.py @@ -53,3 +53,101 @@ def __init__( (255, 255, 255), ], ) + +SVG_EXPORT_THEME = TerminalTheme( + (12, 12, 12), + (242, 242, 242), + [ + (12, 12, 12), + (205, 49, 49), + (13, 188, 121), + (229, 229, 16), + (36, 114, 200), + (188, 63, 188), + (17, 168, 205), + (229, 229, 229), + ], + [ + (102, 102, 102), + (241, 76, 76), + (35, 209, 139), + (245, 245, 67), + (59, 142, 234), + (214, 112, 214), + (41, 184, 219), + (229, 229, 229), + ], +) + +MONOKAI = TerminalTheme( + (12, 12, 12), + (217, 217, 217), + [ + (26, 26, 26), + (244, 0, 95), + (152, 224, 36), + (253, 151, 31), + (157, 101, 255), + (244, 0, 95), + (88, 209, 235), + (196, 197, 181), + (98, 94, 76), + ], + [ + (244, 0, 95), + (152, 224, 36), + (224, 213, 97), + (157, 101, 255), + (244, 0, 95), + (88, 209, 235), + (246, 246, 239), + ], +) +DIMMED_MONOKAI = TerminalTheme( + (25, 25, 25), + (185, 188, 186), + [ + (58, 61, 67), + (190, 63, 72), + (135, 154, 59), + (197, 166, 53), + (79, 118, 161), + (133, 92, 141), + (87, 143, 164), + (185, 188, 186), + (136, 137, 135), + ], + [ + (251, 0, 31), + (15, 114, 47), + (196, 112, 51), + (24, 109, 227), + (251, 0, 103), + (46, 112, 109), + (253, 255, 185), + ], +) +NIGHT_OWLISH = TerminalTheme( + (255, 255, 255), + (64, 63, 83), + [ + (1, 22, 39), + (211, 66, 62), + (42, 162, 152), + (218, 170, 1), + (72, 118, 214), + (64, 63, 83), + (8, 145, 106), + (122, 129, 129), + (122, 129, 129), + ], + [ + (247, 110, 110), + (73, 208, 197), + (218, 194, 107), + (92, 167, 228), + (105, 112, 152), + (0, 201, 144), + (152, 159, 177), + ], +) diff --git a/pipenv/patched/notpip/_vendor/rich/text.py b/pipenv/patched/notpip/_vendor/rich/text.py index c8b3ded50d..773a3d5af4 100644 --- a/pipenv/patched/notpip/_vendor/rich/text.py +++ b/pipenv/patched/notpip/_vendor/rich/text.py @@ -253,6 +253,7 @@ def from_markup( emoji_variant: Optional[EmojiVariant] = None, justify: Optional["JustifyMethod"] = None, overflow: Optional["OverflowMethod"] = None, + end: str = "\n", ) -> "Text": """Create Text instance from markup. @@ -261,6 +262,7 @@ def from_markup( emoji (bool, optional): Also render emoji code. Defaults to True. justify (str, optional): Justify method: "left", "center", "full", "right". Defaults to None. overflow (str, optional): Overflow method: "crop", "fold", "ellipsis". Defaults to None. + end (str, optional): Character to end text with. Defaults to "\\\\n". Returns: Text: A Text instance with markup rendered. 
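A small sketch of the new `end` parameter on Text.from_markup documented above (illustrative only, not part of the vendored diff; plain `rich` imports assumed):

from rich.text import Text

# end="" suppresses the trailing newline the markup-rendered Text
# would otherwise carry, mirroring print(..., end="").
prompt = Text.from_markup("[bold]Continue?[/bold] ", end="")
assert prompt.end == ""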
@@ -270,6 +272,7 @@ def from_markup( rendered_text = render(text, style, emoji=emoji, emoji_variant=emoji_variant) rendered_text.justify = justify rendered_text.overflow = overflow + rendered_text.end = end return rendered_text @classmethod diff --git a/pipenv/patched/notpip/_vendor/rich/traceback.py b/pipenv/patched/notpip/_vendor/rich/traceback.py index 0f81848085..f6c66eea31 100644 --- a/pipenv/patched/notpip/_vendor/rich/traceback.py +++ b/pipenv/patched/notpip/_vendor/rich/traceback.py @@ -12,9 +12,10 @@ from pipenv.patched.notpip._vendor.pygments.token import Comment, Keyword, Name, Number, Operator, String from pipenv.patched.notpip._vendor.pygments.token import Text as TextToken from pipenv.patched.notpip._vendor.pygments.token import Token +from pipenv.patched.notpip._vendor.pygments.util import ClassNotFound from . import pretty -from ._loop import loop_first, loop_last +from ._loop import loop_last from .columns import Columns from .console import Console, ConsoleOptions, ConsoleRenderable, RenderResult, group from .constrain import Constrain @@ -130,7 +131,7 @@ def ipy_display_traceback( try: # pragma: no cover # if within ipython, use customized traceback - ip = get_ipython() # type: ignore + ip = get_ipython() # type: ignore[name-defined] ipy_excepthook_closure(ip) return sys.excepthook except Exception: @@ -390,9 +391,8 @@ def safe_str(_object: Any) -> str: exc_type = cause.__class__ exc_value = cause traceback = cause.__traceback__ - if traceback: - is_cause = True - continue + is_cause = True + continue cause = exc_value.__context__ if ( @@ -403,9 +403,8 @@ def safe_str(_object: Any) -> str: exc_type = cause.__class__ exc_value = cause traceback = cause.__traceback__ - if traceback: - is_cause = False - continue + is_cause = False + continue # No cover, code is reached but coverage doesn't recognize it. break # pragma: no cover @@ -523,10 +522,10 @@ def _guess_lexer(cls, filename: str, code: str) -> str: first_line = code[:new_line_index] if new_line_index != -1 else code if first_line.startswith("#!") and "python" in first_line.lower(): return "python" - lexer_name = ( - cls.LEXERS.get(ext) or guess_lexer_for_filename(filename, code).name - ) - return lexer_name + try: + return cls.LEXERS.get(ext) or guess_lexer_for_filename(filename, code).name + except ClassNotFound: + return "text" @group() def _render_stack(self, stack: Stack) -> RenderResult: @@ -671,7 +670,7 @@ def error() -> None: try: foo(0) except: - slfkjsldkfj # type: ignore + slfkjsldkfj # type: ignore[name-defined] except: console.print_exception(show_locals=True) diff --git a/pipenv/patched/notpip/_vendor/rich/tree.py b/pipenv/patched/notpip/_vendor/rich/tree.py index 7c306960f6..cdc5843bde 100644 --- a/pipenv/patched/notpip/_vendor/rich/tree.py +++ b/pipenv/patched/notpip/_vendor/rich/tree.py @@ -136,6 +136,7 @@ def make_guide(index: int, style: Style) -> Segment: highlight=self.highlight, height=None, ), + pad=options.justify is not None, ) if not (depth == 0 and self.hide_root): @@ -214,9 +215,9 @@ def __rich_measure__( code = """\ class Segment(NamedTuple): - text: str = "" - style: Optional[Style] = None - is_control: bool = False + text: str = "" + style: Optional[Style] = None + is_control: bool = False """ syntax = Syntax(code, "python", theme="monokai", line_numbers=True) @@ -224,7 +225,7 @@ class Segment(NamedTuple): """\ ### example.md > Hello, World! 
-> +> > Markdown _all_ the things """ ) @@ -246,4 +247,5 @@ class Segment(NamedTuple): containers_node.add(Group("📄 [b magenta]Table", table)) console = Console() + console.print(root) diff --git a/pipenv/patched/notpip/_vendor/six.LICENSE b/pipenv/patched/notpip/_vendor/six.LICENSE new file mode 100644 index 0000000000..de6633112c --- /dev/null +++ b/pipenv/patched/notpip/_vendor/six.LICENSE @@ -0,0 +1,18 @@ +Copyright (c) 2010-2020 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/tomli/LICENSE b/pipenv/patched/notpip/_vendor/tomli/LICENSE new file mode 100644 index 0000000000..e859590f88 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/tomli/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Taneli Hukkinen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/tomli/__init__.py b/pipenv/patched/notpip/_vendor/tomli/__init__.py index 2b642a24dc..4c6ec97ec6 100644 --- a/pipenv/patched/notpip/_vendor/tomli/__init__.py +++ b/pipenv/patched/notpip/_vendor/tomli/__init__.py @@ -1,6 +1,11 @@ -"""A lil' TOML parser.""" +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. __all__ = ("loads", "load", "TOMLDecodeError") -__version__ = "1.0.3" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT +__version__ = "2.0.1" # DO NOT EDIT THIS LINE MANUALLY. 
LET bump2version UTILITY DO IT -from pipenv.patched.notpip._vendor.tomli._parser import TOMLDecodeError, load, loads +from ._parser import TOMLDecodeError, load, loads + +# Pretend this exception was created here. +TOMLDecodeError.__module__ = __name__ diff --git a/pipenv/patched/notpip/_vendor/tomli/_parser.py b/pipenv/patched/notpip/_vendor/tomli/_parser.py index ebd80a9e9a..f1bb0aa19a 100644 --- a/pipenv/patched/notpip/_vendor/tomli/_parser.py +++ b/pipenv/patched/notpip/_vendor/tomli/_parser.py @@ -1,42 +1,33 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + +from collections.abc import Iterable import string from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - FrozenSet, - Iterable, - Optional, - TextIO, - Tuple, -) +from typing import Any, BinaryIO, NamedTuple -from pipenv.patched.notpip._vendor.tomli._re import ( - RE_BIN, +from ._re import ( RE_DATETIME, - RE_HEX, RE_LOCALTIME, RE_NUMBER, - RE_OCT, match_to_datetime, match_to_localtime, match_to_number, ) - -if TYPE_CHECKING: - from re import Pattern - +from ._types import Key, ParseFloat, Pos ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) # Neither of these sets include quotation mark or backslash. They are # currently handled as separate cases in the parser functions. ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") -ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n\r") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS -ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ASCII_CTRL - frozenset("\t\n") +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS @@ -44,6 +35,7 @@ TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS = frozenset(string.hexdigits) BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( { @@ -57,30 +49,33 @@ } ) -# Type annotations -ParseFloat = Callable[[str], Any] -Key = Tuple[str, ...] -Pos = int - class TOMLDecodeError(ValueError): """An error raised if a document is not valid TOML.""" -def load(fp: TextIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]: - """Parse TOML from a file object.""" - s = fp.read() +def load(__fp: BinaryIO, *, parse_float: ParseFloat = float) -> dict[str, Any]: + """Parse TOML from a binary file object.""" + b = __fp.read() + try: + s = b.decode() + except AttributeError: + raise TypeError( + "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`" + ) from None return loads(s, parse_float=parse_float) -def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901 +def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]: # noqa: C901 """Parse TOML from a string.""" # The spec allows converting "\r\n" to "\n", even in string # literals. Let's do so to simplify parsing. 
- src = s.replace("\r\n", "\n") + src = __s.replace("\r\n", "\n") pos = 0 - state = State() + out = Output(NestedDict(), Flags()) + header: Key = () + parse_float = make_safe_parse_float(parse_float) # Parse one statement at a time # (typically means one line in TOML source) @@ -104,17 +99,18 @@ def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa pos += 1 continue if char in KEY_INITIAL_CHARS: - pos = key_value_rule(src, pos, state, parse_float) + pos = key_value_rule(src, pos, out, header, parse_float) pos = skip_chars(src, pos, TOML_WS) elif char == "[": try: - second_char: Optional[str] = src[pos + 1] + second_char: str | None = src[pos + 1] except IndexError: second_char = None + out.flags.finalize_pending() if second_char == "[": - pos = create_list_rule(src, pos, state) + pos, header = create_list_rule(src, pos, out) else: - pos = create_dict_rule(src, pos, state) + pos, header = create_dict_rule(src, pos, out) pos = skip_chars(src, pos, TOML_WS) elif char != "#": raise suffixed_err(src, pos, "Invalid statement") @@ -133,17 +129,7 @@ def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa ) pos += 1 - return state.out.dict - - -class State: - def __init__(self) -> None: - # Mutable, read-only - self.out = NestedDict() - self.flags = Flags() - - # Immutable, read and write - self.header_namespace: Key = () + return out.data.dict class Flags: @@ -156,7 +142,16 @@ class Flags: EXPLICIT_NEST = 1 def __init__(self) -> None: - self._flags: Dict[str, dict] = {} + self._flags: dict[str, dict] = {} + self._pending_flags: set[tuple[Key, int]] = set() + + def add_pending(self, key: Key, flag: int) -> None: + self._pending_flags.add((key, flag)) + + def finalize_pending(self) -> None: + for key, flag in self._pending_flags: + self.set(key, flag, recursive=False) + self._pending_flags.clear() def unset_all(self, key: Key) -> None: cont = self._flags @@ -166,19 +161,6 @@ def unset_all(self, key: Key) -> None: cont = cont[k]["nested"] cont.pop(key[-1], None) - def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None: - cont = self._flags - for k in head_key: - if k not in cont: - cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} - cont = cont[k]["nested"] - for k in rel_key: - if k in cont: - cont[k]["flags"].add(flag) - else: - cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}} - cont = cont[k]["nested"] - def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 cont = self._flags key_parent, key_stem = key[:-1], key[-1] @@ -211,7 +193,7 @@ def is_(self, key: Key, flag: int) -> bool: class NestedDict: def __init__(self) -> None: # The parsed content of the TOML document - self.dict: Dict[str, Any] = {} + self.dict: dict[str, Any] = {} def get_or_create_nest( self, @@ -242,6 +224,11 @@ def append_nest_to_list(self, key: Key) -> None: cont[last_key] = [{}] +class Output(NamedTuple): + data: NestedDict + flags: Flags + + def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: try: while src[pos] in chars: @@ -256,7 +243,7 @@ def skip_until( pos: Pos, expect: str, *, - error_on: FrozenSet[str], + error_on: frozenset[str], error_on_eof: bool, ) -> Pos: try: @@ -264,19 +251,18 @@ def skip_until( except ValueError: new_pos = len(src) if error_on_eof: - raise suffixed_err(src, new_pos, f'Expected "{expect!r}"') + raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None - bad_chars = error_on.intersection(src[pos:new_pos]) - if bad_chars: - bad_char = 
next(iter(bad_chars)) - bad_pos = src.index(bad_char, pos) - raise suffixed_err(src, bad_pos, f'Found invalid character "{bad_char!r}"') + if not error_on.isdisjoint(src[pos:new_pos]): + while src[pos] not in error_on: + pos += 1 + raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") return new_pos def skip_comment(src: str, pos: Pos) -> Pos: try: - char: Optional[str] = src[pos] + char: str | None = src[pos] except IndexError: char = None if char == "#": @@ -295,115 +281,116 @@ def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: return pos -def create_dict_rule(src: str, pos: Pos, state: State) -> Pos: +def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: pos += 1 # Skip "[" pos = skip_chars(src, pos, TOML_WS) pos, key = parse_key(src, pos) - if state.flags.is_(key, Flags.EXPLICIT_NEST) or state.flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Can not declare {key} twice") - state.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot declare {key} twice") + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) try: - state.out.get_or_create_nest(key) + out.data.get_or_create_nest(key) except KeyError: - raise suffixed_err(src, pos, "Can not overwrite a value") - state.header_namespace = key + raise suffixed_err(src, pos, "Cannot overwrite a value") from None - if src[pos : pos + 1] != "]": - raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration') - return pos + 1 + if not src.startswith("]", pos): + raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration") + return pos + 1, key -def create_list_rule(src: str, pos: Pos, state: State) -> Pos: +def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: pos += 2 # Skip "[[" pos = skip_chars(src, pos, TOML_WS) pos, key = parse_key(src, pos) - if state.flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + if out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") # Free the namespace now that it points to another empty list item... - state.flags.unset_all(key) + out.flags.unset_all(key) # ...but this key precisely is still prohibited from table declaration - state.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) try: - state.out.append_nest_to_list(key) + out.data.append_nest_to_list(key) except KeyError: - raise suffixed_err(src, pos, "Can not overwrite a value") - state.header_namespace = key + raise suffixed_err(src, pos, "Cannot overwrite a value") from None - end_marker = src[pos : pos + 2] - if end_marker != "]]": - raise suffixed_err( - src, - pos, - f'Found "{end_marker!r}" at the end of an array declaration.' 
- ' Expected "]]"', - ) - return pos + 2 + if not src.startswith("]]", pos): + raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration") + return pos + 2, key -def key_value_rule(src: str, pos: Pos, state: State, parse_float: ParseFloat) -> Pos: +def key_value_rule( + src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat +) -> Pos: pos, key, value = parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] - abs_key_parent = state.header_namespace + key_parent - - if state.flags.is_(abs_key_parent, Flags.FROZEN): + abs_key_parent = header + key_parent + + relative_path_cont_keys = (header + key[:i] for i in range(1, len(key))) + for cont_key in relative_path_cont_keys: + # Check that dotted key syntax does not redefine an existing table + if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): + raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}") + # Containers in the relative path can't be opened with the table syntax or + # dotted key/value syntax in following table sections. + out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) + + if out.flags.is_(abs_key_parent, Flags.FROZEN): raise suffixed_err( - src, pos, f"Can not mutate immutable namespace {abs_key_parent}" + src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" ) - # Containers in the relative path can't be opened with the table syntax after this - state.flags.set_for_relative_key(state.header_namespace, key, Flags.EXPLICIT_NEST) + try: - nest = state.out.get_or_create_nest(abs_key_parent) + nest = out.data.get_or_create_nest(abs_key_parent) except KeyError: - raise suffixed_err(src, pos, "Can not overwrite a value") + raise suffixed_err(src, pos, "Cannot overwrite a value") from None if key_stem in nest: - raise suffixed_err(src, pos, "Can not overwrite a value") + raise suffixed_err(src, pos, "Cannot overwrite a value") # Mark inline table and array namespaces recursively immutable if isinstance(value, (dict, list)): - abs_key = state.header_namespace + key - state.flags.set(abs_key, Flags.FROZEN, recursive=True) + out.flags.set(header + key, Flags.FROZEN, recursive=True) nest[key_stem] = value return pos def parse_key_value_pair( src: str, pos: Pos, parse_float: ParseFloat -) -> Tuple[Pos, Key, Any]: +) -> tuple[Pos, Key, Any]: pos, key = parse_key(src, pos) try: - char: Optional[str] = src[pos] + char: str | None = src[pos] except IndexError: char = None if char != "=": - raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair') + raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair") pos += 1 pos = skip_chars(src, pos, TOML_WS) pos, value = parse_value(src, pos, parse_float) return pos, key, value -def parse_key(src: str, pos: Pos) -> Tuple[Pos, Key]: +def parse_key(src: str, pos: Pos) -> tuple[Pos, Key]: pos, key_part = parse_key_part(src, pos) - key = [key_part] + key: Key = (key_part,) pos = skip_chars(src, pos, TOML_WS) while True: try: - char: Optional[str] = src[pos] + char: str | None = src[pos] except IndexError: char = None if char != ".": - return pos, tuple(key) + return pos, key pos += 1 pos = skip_chars(src, pos, TOML_WS) pos, key_part = parse_key_part(src, pos) - key.append(key_part) + key += (key_part,) pos = skip_chars(src, pos, TOML_WS) -def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]: +def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: try: - char: Optional[str] = src[pos] + char: str | None = src[pos] except IndexError: char = None if char in BARE_KEY_CHARS: @@ 
-417,17 +404,17 @@ def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]: raise suffixed_err(src, pos, "Invalid initial character for a key part") -def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]: +def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]: pos += 1 return parse_basic_str(src, pos, multiline=False) -def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]: +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list]: pos += 1 array: list = [] pos = skip_comments_and_array_ws(src, pos) - if src[pos : pos + 1] == "]": + if src.startswith("]", pos): return pos + 1, array while True: pos, val = parse_value(src, pos, parse_float) @@ -442,29 +429,29 @@ def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list] pos += 1 pos = skip_comments_and_array_ws(src, pos) - if src[pos : pos + 1] == "]": + if src.startswith("]", pos): return pos + 1, array -def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, dict]: +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, dict]: pos += 1 nested_dict = NestedDict() flags = Flags() pos = skip_chars(src, pos, TOML_WS) - if src[pos : pos + 1] == "}": + if src.startswith("}", pos): return pos + 1, nested_dict.dict while True: pos, key, value = parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] if flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") try: nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) except KeyError: - raise suffixed_err(src, pos, "Can not overwrite a value") + raise suffixed_err(src, pos, "Cannot overwrite a value") from None if key_stem in nest: - raise suffixed_err(src, pos, f'Duplicate inline table key "{key_stem}"') + raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") nest[key_stem] = value pos = skip_chars(src, pos, TOML_WS) c = src[pos : pos + 1] @@ -480,7 +467,7 @@ def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos def parse_basic_str_escape( src: str, pos: Pos, *, multiline: bool = False -) -> Tuple[Pos, str]: +) -> tuple[Pos, str]: escape_id = src[pos : pos + 2] pos += 2 if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: @@ -488,11 +475,12 @@ def parse_basic_str_escape( # the doc. Error if non-whitespace is found before newline. 
if escape_id != "\\\n": pos = skip_chars(src, pos, TOML_WS) - char = src[pos : pos + 1] - if not char: + try: + char = src[pos] + except IndexError: return pos, "" if char != "\n": - raise suffixed_err(src, pos, 'Unescaped "\\" in a string') + raise suffixed_err(src, pos, "Unescaped '\\' in a string") pos += 1 pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) return pos, "" @@ -503,18 +491,16 @@ def parse_basic_str_escape( try: return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] except KeyError: - if len(escape_id) != 2: - raise suffixed_err(src, pos, "Unterminated string") - raise suffixed_err(src, pos, 'Unescaped "\\" in a string') + raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None -def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]: +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: return parse_basic_str_escape(src, pos, multiline=True) -def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]: +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: hex_str = src[pos : pos + hex_len] - if len(hex_str) != hex_len or any(c not in string.hexdigits for c in hex_str): + if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): raise suffixed_err(src, pos, "Invalid hex value") pos += hex_len hex_int = int(hex_str, 16) @@ -523,7 +509,7 @@ def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]: return pos, chr(hex_int) -def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]: +def parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]: pos += 1 # Skip starting apostrophe start_pos = pos pos = skip_until( @@ -532,9 +518,9 @@ def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]: return pos + 1, src[start_pos:pos] # Skip ending apostrophe -def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]: +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> tuple[Pos, str]: pos += 3 - if src[pos : pos + 1] == "\n": + if src.startswith("\n", pos): pos += 1 if literal: @@ -554,16 +540,16 @@ def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str] # Add at maximum two extra apostrophes/quotes if the end sequence # is 4 or 5 chars long instead of just 3. 
- if src[pos : pos + 1] != delim: + if not src.startswith(delim, pos): return pos, result pos += 1 - if src[pos : pos + 1] != delim: + if not src.startswith(delim, pos): return pos, result + delim pos += 1 return pos, result + (delim * 2) -def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]: +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: if multiline: error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS parse_escapes = parse_basic_str_escape_multiline @@ -576,11 +562,11 @@ def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]: try: char = src[pos] except IndexError: - raise suffixed_err(src, pos, "Unterminated string") + raise suffixed_err(src, pos, "Unterminated string") from None if char == '"': if not multiline: return pos + 1, result + src[start_pos:pos] - if src[pos + 1 : pos + 3] == '""': + if src.startswith('"""', pos): return pos + 3, result + src[start_pos:pos] pos += 1 continue @@ -591,86 +577,67 @@ def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]: start_pos = pos continue if char in error_on: - raise suffixed_err(src, pos, f'Illegal character "{char!r}"') + raise suffixed_err(src, pos, f"Illegal character {char!r}") pos += 1 -def parse_regex(src: str, pos: Pos, regex: "Pattern") -> Tuple[Pos, str]: - match = regex.match(src, pos) - if not match: - raise suffixed_err(src, pos, "Unexpected sequence") - return match.end(), match.group() - - def parse_value( # noqa: C901 src: str, pos: Pos, parse_float: ParseFloat -) -> Tuple[Pos, Any]: +) -> tuple[Pos, Any]: try: - char: Optional[str] = src[pos] + char: str | None = src[pos] except IndexError: char = None + # IMPORTANT: order conditions based on speed of checking and likelihood + # Basic strings if char == '"': - if src[pos + 1 : pos + 3] == '""': + if src.startswith('"""', pos): return parse_multiline_str(src, pos, literal=False) return parse_one_line_basic_str(src, pos) # Literal strings if char == "'": - if src[pos + 1 : pos + 3] == "''": + if src.startswith("'''", pos): return parse_multiline_str(src, pos, literal=True) return parse_literal_str(src, pos) # Booleans if char == "t": - if src[pos + 1 : pos + 4] == "rue": + if src.startswith("true", pos): return pos + 4, True if char == "f": - if src[pos + 1 : pos + 5] == "alse": + if src.startswith("false", pos): return pos + 5, False + # Arrays + if char == "[": + return parse_array(src, pos, parse_float) + + # Inline tables + if char == "{": + return parse_inline_table(src, pos, parse_float) + # Dates and times datetime_match = RE_DATETIME.match(src, pos) if datetime_match: try: datetime_obj = match_to_datetime(datetime_match) - except ValueError: - raise suffixed_err(src, pos, "Invalid date or datetime") + except ValueError as e: + raise suffixed_err(src, pos, "Invalid date or datetime") from e return datetime_match.end(), datetime_obj localtime_match = RE_LOCALTIME.match(src, pos) if localtime_match: return localtime_match.end(), match_to_localtime(localtime_match) - # Non-decimal integers - if char == "0": - second_char = src[pos + 1 : pos + 2] - if second_char == "x": - pos, hex_str = parse_regex(src, pos + 2, RE_HEX) - return pos, int(hex_str, 16) - if second_char == "o": - pos, oct_str = parse_regex(src, pos + 2, RE_OCT) - return pos, int(oct_str, 8) - if second_char == "b": - pos, bin_str = parse_regex(src, pos + 2, RE_BIN) - return pos, int(bin_str, 2) - - # Decimal integers and "normal" floats. + # Integers and "normal" floats. 
# The regex will greedily match any type starting with a decimal - # char, so needs to be located after handling of non-decimal ints, - # and dates and times. + # char, so needs to be located after handling of dates and times. number_match = RE_NUMBER.match(src, pos) if number_match: return number_match.end(), match_to_number(number_match, parse_float) - # Arrays - if char == "[": - return parse_array(src, pos, parse_float) - - # Inline tables - if char == "{": - return parse_inline_table(src, pos, parse_float) - # Special floats first_three = src[pos : pos + 3] if first_three in {"inf", "nan"}: @@ -701,3 +668,24 @@ def coord_repr(src: str, pos: Pos) -> str: def is_unicode_scalar_value(codepoint: int) -> bool: return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) + + +def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat: + """A decorator to make `parse_float` safe. + + `parse_float` must not return dicts or lists, because these types + would be mixed with parsed TOML tables and arrays, thus confusing + the parser. The returned decorated callable raises `ValueError` + instead of returning illegal types. + """ + # The default `float` callable never returns illegal types. Optimize it. + if parse_float is float: # type: ignore[comparison-overlap] + return float + + def safe_parse_float(float_str: str) -> Any: + float_value = parse_float(float_str) + if isinstance(float_value, (dict, list)): + raise ValueError("parse_float must not return dicts or lists") + return float_value + + return safe_parse_float diff --git a/pipenv/patched/notpip/_vendor/tomli/_re.py b/pipenv/patched/notpip/_vendor/tomli/_re.py index a2ad44176b..994bb7493f 100644 --- a/pipenv/patched/notpip/_vendor/tomli/_re.py +++ b/pipenv/patched/notpip/_vendor/tomli/_re.py @@ -1,37 +1,55 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + from datetime import date, datetime, time, timedelta, timezone, tzinfo +from functools import lru_cache import re -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import Any -if TYPE_CHECKING: - from re import Match - - from pipenv.patched.notpip._vendor.tomli._parser import ParseFloat +from ._types import ParseFloat # E.g. # - 00:32:00.999999 # - 00:32:00 -_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?" +_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" -RE_HEX = re.compile(r"[0-9A-Fa-f](?:_?[0-9A-Fa-f])*") -RE_BIN = re.compile(r"[01](?:_?[01])*") -RE_OCT = re.compile(r"[0-7](?:_?[0-7])*") RE_NUMBER = re.compile( - r"[+-]?(?:0|[1-9](?:_?[0-9])*)" # integer - + r"(?:\.[0-9](?:_?[0-9])*)?" # optional fractional part - + r"(?:[eE][+-]?[0-9](?:_?[0-9])*)?" # optional exponent part + r""" +0 +(?: + x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex + | + b[01](?:_?[01])* # bin + | + o[0-7](?:_?[0-7])* # oct +) +| +[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part +(?P + (?:\.[0-9](?:_?[0-9])*)? # optional fractional part + (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part +) +""", + flags=re.VERBOSE, ) RE_LOCALTIME = re.compile(_TIME_RE_STR) RE_DATETIME = re.compile( - r"([0-9]{4})-(0[1-9]|1[0-2])-(0[1-9]|1[0-9]|2[0-9]|3[01])" # date, e.g. 1988-10-27 - + r"(?:" - + r"[T ]" - + _TIME_RE_STR - + r"(?:(Z)|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))?" # time offset - + r")?" + rf""" +([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 
1988-10-27 +(?: + [Tt ] + {_TIME_RE_STR} + (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset +)? +""", + flags=re.VERBOSE, ) -def match_to_datetime(match: "Match") -> Union[datetime, date]: +def match_to_datetime(match: re.Match) -> datetime | date: """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`. Raises ValueError if the match does not correspond to a valid date @@ -46,7 +64,7 @@ def match_to_datetime(match: "Match") -> Union[datetime, date]: sec_str, micros_str, zulu_time, - offset_dir_str, + offset_sign_str, offset_hour_str, offset_minute_str, ) = match.groups() @@ -54,14 +72,10 @@ def match_to_datetime(match: "Match") -> Union[datetime, date]: if hour_str is None: return date(year, month, day) hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) - micros = int(micros_str[1:].ljust(6, "0")[:6]) if micros_str else 0 - if offset_dir_str: - offset_dir = 1 if offset_dir_str == "+" else -1 - tz: Optional[tzinfo] = timezone( - timedelta( - hours=offset_dir * int(offset_hour_str), - minutes=offset_dir * int(offset_minute_str), - ) + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + if offset_sign_str: + tz: tzinfo | None = cached_tz( + offset_hour_str, offset_minute_str, offset_sign_str ) elif zulu_time: tz = timezone.utc @@ -70,14 +84,24 @@ def match_to_datetime(match: "Match") -> Union[datetime, date]: return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) -def match_to_localtime(match: "Match") -> time: +@lru_cache(maxsize=None) +def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: + sign = 1 if sign_str == "+" else -1 + return timezone( + timedelta( + hours=sign * int(hour_str), + minutes=sign * int(minute_str), + ) + ) + + +def match_to_localtime(match: re.Match) -> time: hour_str, minute_str, sec_str, micros_str = match.groups() - micros = int(micros_str[1:].ljust(6, "0")[:6]) if micros_str else 0 + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 return time(int(hour_str), int(minute_str), int(sec_str), micros) -def match_to_number(match: "Match", parse_float: "ParseFloat") -> Any: - match_str = match.group() - if "." in match_str or "e" in match_str or "E" in match_str: - return parse_float(match_str) - return int(match_str) +def match_to_number(match: re.Match, parse_float: ParseFloat) -> Any: + if match.group("floatpart"): + return parse_float(match.group()) + return int(match.group(), 0) diff --git a/pipenv/patched/notpip/_vendor/tomli/_types.py b/pipenv/patched/notpip/_vendor/tomli/_types.py new file mode 100644 index 0000000000..d949412e03 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/tomli/_types.py @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from typing import Any, Callable, Tuple + +# Type annotations +ParseFloat = Callable[[str], Any] +Key = Tuple[str, ...] +Pos = int diff --git a/pipenv/patched/notpip/_vendor/typing_extensions.LICENSE b/pipenv/patched/notpip/_vendor/typing_extensions.LICENSE new file mode 100644 index 0000000000..583f9f6e61 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/typing_extensions.LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. 
Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. 
PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. 
As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. 
+Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/typing_extensions.py b/pipenv/patched/notpip/_vendor/typing_extensions.py index 9f1c7aa31e..658577c0cc 100644 --- a/pipenv/patched/notpip/_vendor/typing_extensions.py +++ b/pipenv/patched/notpip/_vendor/typing_extensions.py @@ -1,42 +1,12 @@ import abc import collections import collections.abc +import functools import operator import sys +import types as _types import typing -# After PEP 560, internal typing API was substantially reworked. -# This is especially important for Protocol class which uses internal APIs -# quite extensively. -PEP_560 = sys.version_info[:3] >= (3, 7, 0) - -if PEP_560: - GenericMeta = type -else: - # 3.6 - from typing import GenericMeta, _type_vars # noqa - -# The two functions below are copies of typing internal helpers. -# They are needed by _ProtocolMeta - - -def _no_slots_copy(dct): - dict_copy = dict(dct) - if '__slots__' in dict_copy: - for slot in dict_copy['__slots__']: - dict_copy.pop(slot, None) - return dict_copy - - -def _check_generic(cls, parameters): - if not cls.__parameters__: - raise TypeError(f"{cls} is not a generic class") - alen = len(parameters) - elen = len(cls.__parameters__) - if alen != elen: - raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments for {cls};" - f" actual {alen}, expected {elen}") - # Please keep __all__ alphabetized within each category. 
__all__ = [ @@ -44,9 +14,14 @@ def _check_generic(cls, parameters): 'ClassVar', 'Concatenate', 'Final', + 'LiteralString', 'ParamSpec', + 'ParamSpecArgs', + 'ParamSpecKwargs', 'Self', 'Type', + 'TypeVarTuple', + 'Unpack', # ABCs (from collections.abc). 'Awaitable', @@ -70,49 +45,100 @@ def _check_generic(cls, parameters): # One-off things. 'Annotated', + 'assert_never', + 'assert_type', + 'clear_overloads', + 'dataclass_transform', + 'get_overloads', 'final', + 'get_args', + 'get_origin', + 'get_type_hints', 'IntVar', + 'is_typeddict', 'Literal', 'NewType', 'overload', 'Protocol', + 'reveal_type', 'runtime', 'runtime_checkable', 'Text', 'TypeAlias', 'TypeGuard', 'TYPE_CHECKING', + 'Never', + 'NoReturn', + 'Required', + 'NotRequired', ] -if PEP_560: - __all__.extend(["get_args", "get_origin", "get_type_hints"]) +# for backward compatibility +PEP_560 = True +GenericMeta = type -# 3.6.2+ -if hasattr(typing, 'NoReturn'): - NoReturn = typing.NoReturn -# 3.6.0-3.6.1 -else: - class _NoReturn(typing._FinalTypingBase, _root=True): - """Special type indicating functions that never return. - Example:: +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. - from typing import NoReturn +_marker = object() - def stop() -> NoReturn: - raise Exception('no way') - This type is invalid in other positions, e.g., ``List[NoReturn]`` - will fail in static type checkers. - """ - __slots__ = () +def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" + f" actual {alen}, expected {elen}") + + +if sys.version_info >= (3, 10): + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) +elif sys.version_info >= (3, 9): + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) +else: + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + - def __instancecheck__(self, obj): - raise TypeError("NoReturn cannot be used with isinstance().") +def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). 
For example:: - def __subclasscheck__(self, cls): - raise TypeError("NoReturn cannot be used with issubclass().") + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + for t in types: + if ( + isinstance(t, typevar_types) and + t not in tvars and + not _is_unpack(t) + ): + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) - NoReturn = _NoReturn(_root=True) + +NoReturn = typing.NoReturn # Some unconstrained type variables. These are used by the container types. # (These are not for export.) @@ -129,7 +155,7 @@ def __subclasscheck__(self, cls): if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): Final = typing.Final # 3.7 -elif sys.version_info[:2] >= (3, 7): +else: class _FinalForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -137,7 +163,7 @@ def __repr__(self): def __getitem__(self, parameters): item = typing._type_check(parameters, - f'{self._name} accepts only single type') + f'{self._name} accepts only a single type.') return typing._GenericAlias(self, (item,)) Final = _FinalForm('Final', @@ -154,67 +180,13 @@ class FastConnector(Connection): TIMEOUT = 1 # Error reported by type checker There is no runtime checking of these properties.""") -# 3.6 -else: - class _Final(typing._FinalTypingBase, _root=True): - """A special typing construct to indicate that a name - cannot be re-assigned or overridden in a subclass. - For example: - - MAX_SIZE: Final = 9000 - MAX_SIZE += 1 # Error reported by type checker - - class Connection: - TIMEOUT: Final[int] = 10 - class FastConnector(Connection): - TIMEOUT = 1 # Error reported by type checker - - There is no runtime checking of these properties. - """ - - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - f'{cls.__name__[1:]} accepts only single type.'), - _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) - - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += f'[{typing._type_repr(self.__type__)}]' - return r - - def __hash__(self): - return hash((type(self).__name__, self.__type__)) - - def __eq__(self, other): - if not isinstance(other, _Final): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other - - Final = _Final(_root=True) - -# 3.8+ -if hasattr(typing, 'final'): +if sys.version_info >= (3, 11): final = typing.final -# 3.6-3.7 else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 def final(f): """This decorator can be used to indicate to type checkers that the decorated method cannot be overridden, and decorated class @@ -233,8 +205,17 @@ class Leaf: class Other(Leaf): # Error reported by type checker ... - There is no runtime checking of these properties. + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. 
""" + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass return f @@ -246,7 +227,7 @@ def IntVar(name): if hasattr(typing, 'Literal'): Literal = typing.Literal # 3.7: -elif sys.version_info[:2] >= (3, 7): +else: class _LiteralForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -268,59 +249,75 @@ def __getitem__(self, parameters): Literal[...] cannot be subclassed. There is no runtime checking verifying that the parameter is actually a value instead of a type.""") -# 3.6: -else: - class _Literal(typing._FinalTypingBase, _root=True): - """A type that can be used to indicate to type checkers that the - corresponding value has a value literally equivalent to the - provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to the - value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime checking - verifying that the parameter is actually a value instead of a type. - """ - __slots__ = ('__values__',) - - def __init__(self, values=None, **kwds): - self.__values__ = values - - def __getitem__(self, values): - cls = type(self) - if self.__values__ is None: - if not isinstance(values, tuple): - values = (values,) - return cls(values, _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') - - def _eval_type(self, globalns, localns): - return self - - def __repr__(self): - r = super().__repr__() - if self.__values__ is not None: - r += f'[{", ".join(map(typing._type_repr, self.__values__))}]' - return r - def __hash__(self): - return hash((type(self).__name__, self.__values__)) +_overload_dummy = typing._overload_dummy # noqa - def __eq__(self, other): - if not isinstance(other, _Literal): - return NotImplemented - if self.__values__ is not None: - return self.__values__ == other.__values__ - return self is other - Literal = _Literal(_root=True) +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. 
+ pass + return _overload_dummy + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) -_overload_dummy = typing._overload_dummy # noqa -overload = typing.overload + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() # This is not a real generic class. Don't use outside annotations. @@ -330,154 +327,30 @@ def __eq__(self, other): # A few are simply re-exported for completeness. -class _ExtensionsGenericMeta(GenericMeta): - def __subclasscheck__(self, subclass): - """This mimics a more modern GenericMeta.__subclasscheck__() logic - (that does not have problems with recursion) to work around interactions - between collections, typing, and typing_extensions on older - versions of Python, see https://github.com/python/typing/issues/501. - """ - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if not self.__extra__: - return super().__subclasscheck__(subclass) - res = self.__extra__.__subclasshook__(subclass) - if res is not NotImplemented: - return res - if self.__extra__ in subclass.__mro__: - return True - for scls in self.__extra__.__subclasses__(): - if isinstance(scls, GenericMeta): - continue - if issubclass(subclass, scls): - return True - return False - - Awaitable = typing.Awaitable Coroutine = typing.Coroutine AsyncIterable = typing.AsyncIterable AsyncIterator = typing.AsyncIterator - -# 3.6.1+ -if hasattr(typing, 'Deque'): - Deque = typing.Deque -# 3.6.0 -else: - class Deque(collections.deque, typing.MutableSequence[T], - metaclass=_ExtensionsGenericMeta, - extra=collections.deque): - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Deque: - return collections.deque(*args, **kwds) - return typing._generic_new(collections.deque, cls, *args, **kwds) - +Deque = typing.Deque ContextManager = typing.ContextManager -# 3.6.2+ -if hasattr(typing, 'AsyncContextManager'): - AsyncContextManager = typing.AsyncContextManager -# 3.6.0-3.6.1 -else: - from _collections_abc import _check_methods as _check_methods_in_mro # noqa - - class AsyncContextManager(typing.Generic[T_co]): - __slots__ = () - - async def __aenter__(self): - return self - - @abc.abstractmethod - async def __aexit__(self, exc_type, exc_value, traceback): - return None - - @classmethod - def __subclasshook__(cls, C): - if cls is AsyncContextManager: - return _check_methods_in_mro(C, "__aenter__", "__aexit__") - return NotImplemented - +AsyncContextManager = typing.AsyncContextManager DefaultDict = typing.DefaultDict # 3.7.2+ if hasattr(typing, 'OrderedDict'): OrderedDict = typing.OrderedDict # 3.7.0-3.7.2 -elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2): - OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) -# 3.6 -else: - class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.OrderedDict): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is OrderedDict: - return collections.OrderedDict(*args, **kwds) - return typing._generic_new(collections.OrderedDict, cls, *args, **kwds) - -# 3.6.2+ -if 
hasattr(typing, 'Counter'): - Counter = typing.Counter -# 3.6.0-3.6.1 else: - class Counter(collections.Counter, - typing.Dict[T, int], - metaclass=_ExtensionsGenericMeta, extra=collections.Counter): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is Counter: - return collections.Counter(*args, **kwds) - return typing._generic_new(collections.Counter, cls, *args, **kwds) - -# 3.6.1+ -if hasattr(typing, 'ChainMap'): - ChainMap = typing.ChainMap -elif hasattr(collections, 'ChainMap'): - class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT], - metaclass=_ExtensionsGenericMeta, - extra=collections.ChainMap): - - __slots__ = () - - def __new__(cls, *args, **kwds): - if cls._gorg is ChainMap: - return collections.ChainMap(*args, **kwds) - return typing._generic_new(collections.ChainMap, cls, *args, **kwds) - -# 3.6.1+ -if hasattr(typing, 'AsyncGenerator'): - AsyncGenerator = typing.AsyncGenerator -# 3.6.0 -else: - class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra], - metaclass=_ExtensionsGenericMeta, - extra=collections.abc.AsyncGenerator): - __slots__ = () + OrderedDict = typing._alias(collections.OrderedDict, (KT, VT)) +Counter = typing.Counter +ChainMap = typing.ChainMap +AsyncGenerator = typing.AsyncGenerator NewType = typing.NewType Text = typing.Text TYPE_CHECKING = typing.TYPE_CHECKING -def _gorg(cls): - """This function exists for compatibility with old typing versions.""" - assert isinstance(cls, GenericMeta) - if hasattr(cls, '_gorg'): - return cls._gorg - while cls.__origin__ is not None: - cls = cls.__origin__ - return cls - - _PROTO_WHITELIST = ['Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', @@ -511,8 +384,7 @@ def _is_callable_members_only(cls): if hasattr(typing, 'Protocol'): Protocol = typing.Protocol # 3.7 -elif PEP_560: - from typing import _collect_type_vars # noqa +else: def _no_init(self, *args, **kwargs): if type(self)._is_protocol: @@ -600,7 +472,7 @@ def __class_getitem__(cls, params): "Parameters to Protocol[...] must all be unique") else: # Subscripting a regular Generic subclass. - _check_generic(cls, params) + _check_generic(cls, params, len(cls.__parameters__)) return typing._GenericAlias(cls, params) def __init_subclass__(cls, *args, **kwargs): @@ -612,7 +484,7 @@ def __init_subclass__(cls, *args, **kwargs): if error: raise TypeError("Cannot inherit from plain Generic") if '__orig_bases__' in cls.__dict__: - tvars = _collect_type_vars(cls.__orig_bases__) + tvars = typing._collect_type_vars(cls.__orig_bases__) # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. # If found, tvars must be a subset of it. # If not found, tvars is it. @@ -694,250 +566,12 @@ def _proto_hook(other): raise TypeError('Protocols can only inherit from other' f' protocols, got {repr(base)}') cls.__init__ = _no_init -# 3.6 -else: - from typing import _next_in_mro, _type_check # noqa - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - class _ProtocolMeta(GenericMeta): - """Internal metaclass for Protocol. - - This exists so Protocol classes can be generic without deriving - from Generic. - """ - def __new__(cls, name, bases, namespace, - tvars=None, args=None, origin=None, extra=None, orig_bases=None): - # This is just a version copied from GenericMeta.__new__ that - # includes "Protocol" special treatment. (Comments removed for brevity.) 
- assert extra is None # Protocols should not have extra - if tvars is not None: - assert origin is not None - assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars - else: - tvars = _type_vars(bases) - gvars = None - for base in bases: - if base is typing.Generic: - raise TypeError("Cannot inherit from plain Generic") - if (isinstance(base, GenericMeta) and - base.__origin__ in (typing.Generic, Protocol)): - if gvars is not None: - raise TypeError( - "Cannot inherit from Generic[...] or" - " Protocol[...] multiple times.") - gvars = base.__parameters__ - if gvars is None: - gvars = tvars - else: - tvarset = set(tvars) - gvarset = set(gvars) - if not tvarset <= gvarset: - s_vars = ", ".join(str(t) for t in tvars if t not in gvarset) - s_args = ", ".join(str(g) for g in gvars) - cls_name = "Generic" if any(b.__origin__ is typing.Generic - for b in bases) else "Protocol" - raise TypeError(f"Some type variables ({s_vars}) are" - f" not listed in {cls_name}[{s_args}]") - tvars = gvars - - initial_bases = bases - if (extra is not None and type(extra) is abc.ABCMeta and - extra not in bases): - bases = (extra,) + bases - bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b - for b in bases) - if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases): - bases = tuple(b for b in bases if b is not typing.Generic) - namespace.update({'__origin__': origin, '__extra__': extra}) - self = super(GenericMeta, cls).__new__(cls, name, bases, namespace, - _root=True) - super(GenericMeta, self).__setattr__('_gorg', - self if not origin else - _gorg(origin)) - self.__parameters__ = tvars - self.__args__ = tuple(... if a is typing._TypingEllipsis else - () if a is typing._TypingEmpty else - a for a in args) if args else None - self.__next_in_mro__ = _next_in_mro(self) - if orig_bases is None: - self.__orig_bases__ = initial_bases - elif origin is not None: - self._abc_registry = origin._abc_registry - self._abc_cache = origin._abc_cache - if hasattr(self, '_subs_tree'): - self.__tree_hash__ = (hash(self._subs_tree()) if origin else - super(GenericMeta, self).__hash__()) - return self - - def __init__(cls, *args, **kwargs): - super().__init__(*args, **kwargs) - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol or - isinstance(b, _ProtocolMeta) and - b.__origin__ is Protocol - for b in cls.__bases__) - if cls._is_protocol: - for base in cls.__mro__[1:]: - if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or - isinstance(base, typing.TypingMeta) and base._is_protocol or - isinstance(base, GenericMeta) and - base.__origin__ is typing.Generic): - raise TypeError(f'Protocols can only inherit from other' - f' protocols, got {repr(base)}') - - cls.__init__ = _no_init - - def _proto_hook(other): - if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook 
- - def __instancecheck__(self, instance): - # We need this method for situations where attributes are - # assigned in __init__. - if ((not getattr(self, '_is_protocol', False) or - _is_callable_members_only(self)) and - issubclass(instance.__class__, self)): - return True - if self._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(self, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(self)): - return True - return super(GenericMeta, self).__instancecheck__(instance) - - def __subclasscheck__(self, cls): - if self.__origin__ is not None: - if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']: - raise TypeError("Parameterized generics cannot be used with class " - "or instance checks") - return False - if (self.__dict__.get('_is_protocol', None) and - not self.__dict__.get('_is_runtime_protocol', None)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return False - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if (self.__dict__.get('_is_runtime_protocol', None) and - not _is_callable_members_only(self)): - if sys._getframe(1).f_globals['__name__'] in ['abc', - 'functools', - 'typing']: - return super(GenericMeta, self).__subclasscheck__(cls) - raise TypeError("Protocols with non-method members" - " don't support issubclass()") - return super(GenericMeta, self).__subclasscheck__(cls) - - @typing._tp_cache - def __getitem__(self, params): - # We also need to copy this from GenericMeta.__getitem__ to get - # special treatment of "Protocol". (Comments removed for brevity.) - if not isinstance(params, tuple): - params = (params,) - if not params and _gorg(self) is not typing.Tuple: - raise TypeError( - f"Parameter list to {self.__qualname__}[...] cannot be empty") - msg = "Parameters to generic types must be types." - params = tuple(_type_check(p, msg) for p in params) - if self in (typing.Generic, Protocol): - if not all(isinstance(p, typing.TypeVar) for p in params): - raise TypeError( - f"Parameters to {repr(self)}[...] must all be type variables") - if len(set(params)) != len(params): - raise TypeError( - f"Parameters to {repr(self)}[...] must all be unique") - tvars = params - args = params - elif self in (typing.Tuple, typing.Callable): - tvars = _type_vars(params) - args = params - elif self.__origin__ in (typing.Generic, Protocol): - raise TypeError(f"Cannot subscript already-subscripted {repr(self)}") - else: - _check_generic(self, params) - tvars = _type_vars(params) - args = params - - prepend = (self,) if self.__origin__ is None else () - return self.__class__(self.__name__, - prepend + self.__bases__, - _no_slots_copy(self.__dict__), - tvars=tvars, - args=args, - origin=self, - extra=self.__extra__, - orig_bases=self.__orig_bases__) - - class Protocol(metaclass=_ProtocolMeta): - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... - - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: - - class C: - def meth(self) -> int: - return 0 - - def func(x: Proto) -> int: - return x.meth() - - func(C()) # Passes static type check - - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. 
- - Protocol classes can be generic, they are defined as:: - - class GenProto(Protocol[T]): - def meth(self) -> T: - ... - """ - __slots__ = () - _is_protocol = True - - def __new__(cls, *args, **kwds): - if _gorg(cls) is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can be used only as a base class") - return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds) # 3.8+ if hasattr(typing, 'runtime_checkable'): runtime_checkable = typing.runtime_checkable -# 3.6-3.7 +# 3.7 else: def runtime_checkable(cls): """Mark a protocol class as a runtime protocol, so that it @@ -961,7 +595,7 @@ def runtime_checkable(cls): # 3.8+ if hasattr(typing, 'SupportsIndex'): SupportsIndex = typing.SupportsIndex -# 3.6-3.7 +# 3.7 else: @runtime_checkable class SupportsIndex(Protocol): @@ -972,12 +606,16 @@ def __index__(self) -> int: pass -if sys.version_info >= (3, 9, 2): +if hasattr(typing, "Required"): # The standard library TypedDict in Python 3.8 does not store runtime information # about which (if any) keys are optional. See https://bugs.python.org/issue38834 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict else: def _check_fails(cls, other): try: @@ -1061,7 +699,6 @@ def __new__(cls, name, bases, ns, total=True): annotations = {} own_annotations = ns.get('__annotations__', {}) - own_annotation_keys = set(own_annotations.keys()) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" own_annotations = { n: typing._type_check(tp, msg) for n, tp in own_annotations.items() @@ -1075,10 +712,22 @@ def __new__(cls, name, bases, ns, total=True): optional_keys.update(base.__dict__.get('__optional_keys__', ())) annotations.update(own_annotations) - if total: - required_keys.update(own_annotation_keys) - else: - optional_keys.update(own_annotation_keys) + for annotation_key, annotation_type in own_annotations.items(): + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) + + if annotation_origin is Required: + required_keys.add(annotation_key) + elif annotation_origin is NotRequired: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) tp_dict.__annotations__ = annotations tp_dict.__required_keys__ = frozenset(required_keys) @@ -1121,16 +770,127 @@ class Point2D(TypedDict): syntax forms work for Python 2.7 and 3.2+ """ + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + def assert_type(__val, __typ): + """Assert (to the type checker) that the value is of the given type. 
+ + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. + """ + return __val + + +if hasattr(typing, "Required"): + get_type_hints = typing.get_type_hints +else: + import functools + import types + + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return types.GenericAlias(t.__origin__, stripped_args) + if hasattr(types, "UnionType") and isinstance(t, types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) -# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints) + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in hint.items()} + + +# Python 3.9+ has PEP 593 (Annotated) if hasattr(typing, 'Annotated'): Annotated = typing.Annotated - get_type_hints = typing.get_type_hints # Not exported and not a public API, but needed for get_origin() and get_args() # to work. 
_AnnotatedAlias = typing._AnnotatedAlias # 3.7-3.8 -elif PEP_560: +else: class _AnnotatedAlias(typing._GenericAlias, _root=True): """Runtime representation of an annotated type. @@ -1214,205 +974,19 @@ def __class_getitem__(cls, params): raise TypeError("Annotated[...] should be used " "with at least two arguments (a type and an " "annotation).") - msg = "Annotated[t, ...]: t must be a type." - origin = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return _AnnotatedAlias(origin, metadata) - - def __init_subclass__(cls, *args, **kwargs): - raise TypeError( - f"Cannot subclass {cls.__module__}.Annotated" - ) - - def _strip_annotations(t): - """Strips the annotations from a given type. - """ - if isinstance(t, _AnnotatedAlias): - return _strip_annotations(t.__origin__) - if isinstance(t, typing._GenericAlias): - stripped_args = tuple(_strip_annotations(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - res = t.copy_with(stripped_args) - res._special = t._special - return res - return t - - def get_type_hints(obj, globalns=None, localns=None, include_extras=False): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) - if include_extras: - return hint - return {k: _strip_annotations(t) for k, t in hint.items()} -# 3.6 -else: - - def _is_dunder(name): - """Returns True if name is a __dunder_variable_name__.""" - return len(name) > 4 and name.startswith('__') and name.endswith('__') - - # Prior to Python 3.7 types did not have `copy_with`. A lot of the equality - # checks, argument expansion etc. are done on the _subs_tre. As a result we - # can't provide a get_type_hints function that strips out annotations. 
- - class AnnotatedMeta(typing.GenericMeta): - """Metaclass for Annotated""" - - def __new__(cls, name, bases, namespace, **kwargs): - if any(b is not object for b in bases): - raise TypeError("Cannot subclass " + str(Annotated)) - return super().__new__(cls, name, bases, namespace, **kwargs) - - @property - def __metadata__(self): - return self._subs_tree()[2] - - def _tree_repr(self, tree): - cls, origin, metadata = tree - if not isinstance(origin, tuple): - tp_repr = typing._type_repr(origin) - else: - tp_repr = origin[0]._tree_repr(origin) - metadata_reprs = ", ".join(repr(arg) for arg in metadata) - return f'{cls}[{tp_repr}, {metadata_reprs}]' - - def _subs_tree(self, tvars=None, args=None): # noqa - if self is Annotated: - return Annotated - res = super()._subs_tree(tvars=tvars, args=args) - # Flatten nested Annotated - if isinstance(res[1], tuple) and res[1][0] is Annotated: - sub_tp = res[1][1] - sub_annot = res[1][2] - return (Annotated, sub_tp, sub_annot + res[2]) - return res - - def _get_cons(self): - """Return the class used to create instance of this type.""" - if self.__origin__ is None: - raise TypeError("Cannot get the underlying type of a " - "non-specialized Annotated type.") - tree = self._subs_tree() - while isinstance(tree, tuple) and tree[0] is Annotated: - tree = tree[1] - if isinstance(tree, tuple): - return tree[0] - else: - return tree - - @typing._tp_cache - def __getitem__(self, params): - if not isinstance(params, tuple): - params = (params,) - if self.__origin__ is not None: # specializing an instantiated type - return super().__getitem__(params) - elif not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be instantiated " - "with at least two arguments (a type and an " - "annotation).") + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] else: msg = "Annotated[t, ...]: t must be a type." - tp = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return self.__class__( - self.__name__, - self.__bases__, - _no_slots_copy(self.__dict__), - tvars=_type_vars((tp,)), - # Metadata is a tuple so it won't be touched by _replace_args et al. - args=(tp, metadata), - origin=self, - ) - - def __call__(self, *args, **kwargs): - cons = self._get_cons() - result = cons(*args, **kwargs) - try: - result.__orig_class__ = self - except AttributeError: - pass - return result - - def __getattr__(self, attr): - # For simplicity we just don't relay all dunder names - if self.__origin__ is not None and not _is_dunder(attr): - return getattr(self._get_cons(), attr) - raise AttributeError(attr) - - def __setattr__(self, attr, value): - if _is_dunder(attr) or attr.startswith('_abc_'): - super().__setattr__(attr, value) - elif self.__origin__ is None: - raise AttributeError(attr) - else: - setattr(self._get_cons(), attr, value) - - def __instancecheck__(self, obj): - raise TypeError("Annotated cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Annotated cannot be used with issubclass().") - - class Annotated(metaclass=AnnotatedMeta): - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. 
- - The first argument to Annotated must be a valid type, the remaining - arguments are kept as a tuple in the __metadata__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + f"Cannot subclass {cls.__module__}.Annotated" + ) # Python 3.8 has get_origin() and get_args() but those implementations aren't # Annotated-aware, so we can't use those. Python 3.9's versions don't support @@ -1421,7 +995,7 @@ class Annotated(metaclass=AnnotatedMeta): get_origin = typing.get_origin get_args = typing.get_args # 3.7-3.9 -elif PEP_560: +else: try: # 3.9+ from typing import _BaseGenericAlias @@ -1429,9 +1003,9 @@ class Annotated(metaclass=AnnotatedMeta): _BaseGenericAlias = typing._GenericAlias try: # 3.9+ - from typing import GenericAlias + from typing import GenericAlias as _typing_GenericAlias except ImportError: - GenericAlias = typing._GenericAlias + _typing_GenericAlias = typing._GenericAlias def get_origin(tp): """Get the unsubscripted version of a type. @@ -1450,7 +1024,7 @@ def get_origin(tp): """ if isinstance(tp, _AnnotatedAlias): return Annotated - if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias, + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, ParamSpecArgs, ParamSpecKwargs)): return tp.__origin__ if tp is typing.Generic: @@ -1470,7 +1044,7 @@ def get_args(tp): """ if isinstance(tp, _AnnotatedAlias): return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (typing._GenericAlias, GenericAlias)): + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): if getattr(tp, "_special", False): return () res = tp.__args__ @@ -1503,7 +1077,7 @@ def TypeAlias(self, parameters): """ raise TypeError(f"{self} is not subscriptable") # 3.7-3.8 -elif sys.version_info[:2] >= (3, 7): +else: class _TypeAliasForm(typing._SpecialForm, _root=True): def __repr__(self): return 'typing_extensions.' + self._name @@ -1519,44 +1093,13 @@ def __repr__(self): It's invalid when used anywhere except as in the example above.""") -# 3.6 -else: - class _TypeAliasMeta(typing.TypingMeta): - """Metaclass for TypeAlias""" - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. 
- """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("TypeAlias cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("TypeAlias cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.TypeAlias' - - TypeAlias = _TypeAliasBase(_root=True) # Python 3.10+ has PEP 612 if hasattr(typing, 'ParamSpecArgs'): ParamSpecArgs = typing.ParamSpecArgs ParamSpecKwargs = typing.ParamSpecKwargs -# 3.6-3.9 +# 3.7-3.9 else: class _Immutable: """Mixin to indicate that object should not be copied.""" @@ -1586,6 +1129,11 @@ def __init__(self, origin): def __repr__(self): return f"{self.__origin__.__name__}.args" + def __eq__(self, other): + if not isinstance(other, ParamSpecArgs): + return NotImplemented + return self.__origin__ == other.__origin__ + class ParamSpecKwargs(_Immutable): """The kwargs for a ParamSpec object. @@ -1604,10 +1152,15 @@ def __init__(self, origin): def __repr__(self): return f"{self.__origin__.__name__}.kwargs" + def __eq__(self, other): + if not isinstance(other, ParamSpecKwargs): + return NotImplemented + return self.__origin__ == other.__origin__ + # 3.10+ if hasattr(typing, 'ParamSpec'): ParamSpec = typing.ParamSpec -# 3.6-3.9 +# 3.7-3.9 else: # Inherits from list as a workaround for Callable checks in Python < 3.9.2. @@ -1709,28 +1262,17 @@ def __reduce__(self): def __call__(self, *args, **kwargs): pass - if not PEP_560: - # Only needed in 3.6. - def _get_type_vars(self, tvars): - if self not in tvars: - tvars.append(self) - -# 3.6-3.9 +# 3.7-3.9 if not hasattr(typing, 'Concatenate'): # Inherits from list as a workaround for Callable checks in Python < 3.9.2. class _ConcatenateGenericAlias(list): # Trick Generic into looking into this for __parameters__. - if PEP_560: - __class__ = typing._GenericAlias - else: - __class__ = typing._TypingBase + __class__ = typing._GenericAlias # Flag in 3.8. _special = False - # Attribute in 3.6 and earlier. - _gorg = typing.Generic def __init__(self, origin, args): super().__init__(args) @@ -1755,14 +1297,8 @@ def __parameters__(self): tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) ) - if not PEP_560: - # Only required in 3.6. - def _get_type_vars(self, tvars): - if self.__origin__ and self.__parameters__: - typing._get_type_vars(self.__parameters__, tvars) - -# 3.6-3.9 +# 3.7-3.9 @typing._tp_cache def _concatenate_getitem(self, parameters): if parameters == (): @@ -1797,7 +1333,7 @@ def Concatenate(self, parameters): """ return _concatenate_getitem(self, parameters) # 3.7-8 -elif sys.version_info[:2] >= (3, 7): +else: class _ConcatenateForm(typing._SpecialForm, _root=True): def __repr__(self): return 'typing_extensions.' + self._name @@ -1817,42 +1353,6 @@ def __getitem__(self, parameters): See PEP 612 for detailed information. """) -# 3.6 -else: - class _ConcatenateAliasMeta(typing.TypingMeta): - """Metaclass for Concatenate.""" - - def __repr__(self): - return 'typing_extensions.Concatenate' - - class _ConcatenateAliasBase(typing._FinalTypingBase, - metaclass=_ConcatenateAliasMeta, - _root=True): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. 
- """ - __slots__ = () - - def __instancecheck__(self, obj): - raise TypeError("Concatenate cannot be used with isinstance().") - - def __subclasscheck__(self, cls): - raise TypeError("Concatenate cannot be used with issubclass().") - - def __repr__(self): - return 'typing_extensions.Concatenate' - - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateAliasBase(_root=True) # 3.10+ if hasattr(typing, 'TypeGuard'): @@ -1907,10 +1407,10 @@ def is_str(val: Union[str, float]): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). """ - item = typing._type_check(parameters, f'{self} accepts only single type.') + item = typing._type_check(parameters, f'{self} accepts only a single type.') return typing._GenericAlias(self, (item,)) # 3.7-3.8 -elif sys.version_info[:2] >= (3, 7): +else: class _TypeGuardForm(typing._SpecialForm, _root=True): def __repr__(self): @@ -1965,135 +1465,78 @@ def is_str(val: Union[str, float]): ``TypeGuard`` also works with type variables. For more information, see PEP 647 (User-Defined Type Guards). """) -# 3.6 -else: - class _TypeGuard(typing._FinalTypingBase, _root=True): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). 
- """ +# Vendored from cpython typing._SpecialFrom +class _SpecialForm(typing._Final, _root=True): + __slots__ = ('_name', '__doc__', '_getitem') - __slots__ = ('__type__',) + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ - def __init__(self, tp=None, **kwds): - self.__type__ = tp + def __getattr__(self, item): + if item in {'__name__', '__qualname__'}: + return self._name - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - f'{cls.__name__[1:]} accepts only a single type.'), - _root=True) - raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted') + raise AttributeError(item) - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") - def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += f'[{typing._type_repr(self.__type__)}]' - return r + def __repr__(self): + return f'typing_extensions.{self._name}' - def __hash__(self): - return hash((type(self).__name__, self.__type__)) + def __reduce__(self): + return self._name - def __eq__(self, other): - if not isinstance(other, _TypeGuard): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ - return self is other + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") - TypeGuard = _TypeGuard(_root=True) + def __or__(self, other): + return typing.Union[self, other] -if hasattr(typing, "Self"): - Self = typing.Self -elif sys.version_info[:2] >= (3, 7): - # Vendored from cpython typing._SpecialFrom - class _SpecialForm(typing._Final, _root=True): - __slots__ = ('_name', '__doc__', '_getitem') + def __ror__(self, other): + return typing.Union[other, self] - def __init__(self, getitem): - self._getitem = getitem - self._name = getitem.__name__ - self.__doc__ = getitem.__doc__ + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") - def __getattr__(self, item): - if item in {'__name__', '__qualname__'}: - return self._name + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") - raise AttributeError(item) + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) - def __mro_entries__(self, bases): - raise TypeError(f"Cannot subclass {self!r}") - def __repr__(self): - return f'typing_extensions.{self._name}' +if hasattr(typing, "LiteralString"): + LiteralString = typing.LiteralString +else: + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. - def __reduce__(self): - return self._name + Example:: - def __call__(self, *args, **kwds): - raise TypeError(f"Cannot instantiate {self!r}") + from pipenv.patched.notpip._vendor.typing_extensions import LiteralString - def __or__(self, other): - return typing.Union[self, other] + def query(sql: LiteralString) -> ...: + ... - def __ror__(self, other): - return typing.Union[other, self] + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok - def __instancecheck__(self, obj): - raise TypeError(f"{self} cannot be used with isinstance()") + See PEP 675 for details. 
- def __subclasscheck__(self, cls): - raise TypeError(f"{self} cannot be used with issubclass()") + """ + raise TypeError(f"{self} is not subscriptable") - @typing._tp_cache - def __getitem__(self, parameters): - return self._getitem(self, parameters) +if hasattr(typing, "Self"): + Self = typing.Self +else: @_SpecialForm def Self(self, params): """Used to spell the type of "self" in classes. @@ -2110,30 +1553,36 @@ def parse(self, data: bytes) -> Self: """ raise TypeError(f"{self} is not subscriptable") -else: - class _Self(typing._FinalTypingBase, _root=True): - """Used to spell the type of "self" in classes. - Example:: - from typing import Self +if hasattr(typing, "Never"): + Never = typing.Never +else: + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. - class ReturnsSelf: - def parse(self, data: bytes) -> Self: - ... - return self + This can be used to define a function that should never be + called, or a function that never returns:: - """ + from pipenv.patched.notpip._vendor.typing_extensions import Never - __slots__ = () + def never_call_me(arg: Never) -> None: + pass - def __instancecheck__(self, obj): - raise TypeError(f"{self} cannot be used with isinstance().") + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never - def __subclasscheck__(self, cls): - raise TypeError(f"{self} cannot be used with issubclass().") + """ - Self = _Self(_root=True) + raise TypeError(f"{self} is not subscriptable") if hasattr(typing, 'Required'): @@ -2161,7 +1610,7 @@ class Movie(TypedDict, total=False): There is no runtime checking that a required key is actually provided when instantiating a related TypedDict. """ - item = typing._type_check(parameters, f'{self._name} accepts only single type') + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return typing._GenericAlias(self, (item,)) @_ExtensionsSpecialForm @@ -2178,17 +1627,17 @@ class Movie(TypedDict): year=1999, ) """ - item = typing._type_check(parameters, f'{self._name} accepts only single type') + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return typing._GenericAlias(self, (item,)) -elif sys.version_info[:2] >= (3, 7): +else: class _RequiredForm(typing._SpecialForm, _root=True): def __repr__(self): return 'typing_extensions.' + self._name def __getitem__(self, parameters): item = typing._type_check(parameters, - '{} accepts only single type'.format(self._name)) + f'{self._name} accepts only a single type.') return typing._GenericAlias(self, (item,)) Required = _RequiredForm( @@ -2222,75 +1671,290 @@ class Movie(TypedDict): year=1999, ) """) + + +if hasattr(typing, "Unpack"): # 3.11+ + Unpack = typing.Unpack +elif sys.version_info[:2] >= (3, 9): + class _UnpackSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @_UnpackSpecialForm + def Unpack(self, parameters): + """A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... 
+ + """ + item = typing._type_check(parameters, f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + +else: + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + class _UnpackForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + def __getitem__(self, parameters): + item = typing._type_check(parameters, + f'{self._name} accepts only a single type.') + return _UnpackAlias(self, (item,)) + + Unpack = _UnpackForm( + 'Unpack', + doc="""A special typing construct to unpack a variadic type. For example: + + Shape = TypeVarTuple('Shape') + Batch = NewType('Batch', int) + + def add_batch_axis( + x: Array[Unpack[Shape]] + ) -> Array[Batch, Unpack[Shape]]: ... + + """) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +if hasattr(typing, "TypeVarTuple"): # 3.11+ + TypeVarTuple = typing.TypeVarTuple else: - # NOTE: Modeled after _Final's implementation when _FinalTypingBase available - class _MaybeRequired(typing._FinalTypingBase, _root=True): - __slots__ = ('__type__',) - - def __init__(self, tp=None, **kwds): - self.__type__ = tp - - def __getitem__(self, item): - cls = type(self) - if self.__type__ is None: - return cls(typing._type_check(item, - '{} accepts only single type.'.format(cls.__name__[1:])), - _root=True) - raise TypeError('{} cannot be further subscripted' - .format(cls.__name__[1:])) - - def _eval_type(self, globalns, localns): - new_tp = typing._eval_type(self.__type__, globalns, localns) - if new_tp == self.__type__: - return self - return type(self)(new_tp, _root=True) + class TypeVarTuple: + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... + + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. + + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. 
+ __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name): + self.__name__ = name + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] def __repr__(self): - r = super().__repr__() - if self.__type__ is not None: - r += '[{}]'.format(typing._type_repr(self.__type__)) - return r + return self.__name__ def __hash__(self): - return hash((type(self).__name__, self.__type__)) + return object.__hash__(self) def __eq__(self, other): - if not isinstance(other, type(self)): - return NotImplemented - if self.__type__ is not None: - return self.__type__ == other.__type__ return self is other - class _Required(_MaybeRequired, _root=True): - """A special typing construct to mark a key of a total=False TypedDict - as required. For example: + def __reduce__(self): + return self.__name__ - class Movie(TypedDict, total=False): - title: Required[str] - year: int + def __init_subclass__(self, *args, **kwds): + if '_root' not in kwds: + raise TypeError("Cannot subclass special typing classes") - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - There is no runtime checking that a required key is actually provided - when instantiating a related TypedDict. +if hasattr(typing, "reveal_type"): + reveal_type = typing.reveal_type +else: + def reveal_type(__obj: T) -> T: + """Reveal the inferred type of a variable. + + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + """ + print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr) + return __obj - class _NotRequired(_MaybeRequired, _root=True): - """A special typing construct to mark a key of a TypedDict as - potentially missing. For example: - class Movie(TypedDict): - title: str - year: NotRequired[int] +if hasattr(typing, "assert_never"): + assert_never = typing.assert_never +else: + def assert_never(__arg: Never) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) """ + raise AssertionError("Expected code to be unreachable") - Required = _Required(_root=True) - NotRequired = _NotRequired(_root=True) + +if hasattr(typing, 'dataclass_transform'): + dataclass_transform = typing.dataclass_transform +else: + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], + ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. 
+ + Example: + + from pipenv.patched.notpip._vendor.typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. + + """ + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + return decorator + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + typing._collect_type_vars = _collect_type_vars + typing._check_generic = _check_generic diff --git a/pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt b/pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt new file mode 100644 index 0000000000..429a1767e4 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/urllib3/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/pipenv/patched/notpip/_vendor/urllib3/_version.py b/pipenv/patched/notpip/_vendor/urllib3/_version.py index fa8979d73e..d905b69755 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/_version.py +++ b/pipenv/patched/notpip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.8" +__version__ = "1.26.9" diff --git a/pipenv/patched/notpip/_vendor/urllib3/connection.py b/pipenv/patched/notpip/_vendor/urllib3/connection.py index 4d92ac6d2c..7bf395bdac 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/connection.py +++ b/pipenv/patched/notpip/_vendor/urllib3/connection.py @@ -355,17 +355,15 @@ def set_cert( def connect(self): # Add certificate verification - conn = self._new_conn() + self.sock = conn = self._new_conn() hostname = self.host tls_in_tls = False if self._is_using_tunnel(): if self.tls_in_tls_required: - conn = self._connect_tls_proxy(hostname, conn) + self.sock = conn = self._connect_tls_proxy(hostname, conn) tls_in_tls = True - self.sock = conn - # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() diff --git a/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py b/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py index 3a31a285bf..ca4ec34118 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py +++ b/pipenv/patched/notpip/_vendor/urllib3/poolmanager.py @@ -34,6 +34,7 @@ "ca_cert_dir", "ssl_context", "key_password", + "server_hostname", ) # All known keyword arguments that could be provided to the pool manager, its diff --git a/pipenv/patched/notpip/_vendor/urllib3/response.py b/pipenv/patched/notpip/_vendor/urllib3/response.py index 38693f4fc6..776e49dd2b 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/response.py +++ b/pipenv/patched/notpip/_vendor/urllib3/response.py @@ -7,10 +7,7 @@ from socket import error as SocketError from socket import timeout as SocketTimeout -try: - import brotli -except ImportError: - brotli = None +brotli = None from ._collections import HTTPHeaderDict from .connection import BaseSSLError, HTTPException diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/request.py b/pipenv/patched/notpip/_vendor/urllib3/util/request.py index 25103383ec..330766ef4f 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/request.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/request.py @@ -13,12 +13,6 @@ SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) ACCEPT_ENCODING = "gzip,deflate" -try: - import brotli as _unused_module_brotli # noqa: F401 -except ImportError: - pass -else: - ACCEPT_ENCODING += ",br" _FAILEDTELL = object() diff --git a/pipenv/patched/notpip/_vendor/urllib3/util/ssl_match_hostname.py b/pipenv/patched/notpip/_vendor/urllib3/util/ssl_match_hostname.py index a4b4a569cb..1dd950c489 100644 --- a/pipenv/patched/notpip/_vendor/urllib3/util/ssl_match_hostname.py +++ b/pipenv/patched/notpip/_vendor/urllib3/util/ssl_match_hostname.py @@ -112,11 +112,9 @@ def match_hostname(cert, hostname): try: # Divergence from upstream: ipaddress can't handle byte 
str host_ip = ipaddress.ip_address(_to_unicode(hostname)) - except ValueError: - # Not an IP address (common case) - host_ip = None - except UnicodeError: - # Divergence from upstream: Have to deal with ipaddress not taking + except (UnicodeError, ValueError): + # ValueError: Not an IP address (common case) + # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking # byte strings. addresses should be all ascii, so we consider it not # an ipaddress in this case host_ip = None @@ -124,7 +122,7 @@ def match_hostname(cert, hostname): # Divergence from upstream: Make ipaddress library optional if ipaddress is None: host_ip = None - else: + else: # Defensive raise dnsnames = [] san = cert.get("subjectAltName", ()) diff --git a/pipenv/patched/notpip/_vendor/vendor.txt b/pipenv/patched/notpip/_vendor/vendor.txt index 2c93c0f8f2..345b1f2c62 100644 --- a/pipenv/patched/notpip/_vendor/vendor.txt +++ b/pipenv/patched/notpip/_vendor/vendor.txt @@ -1,25 +1,24 @@ -CacheControl==0.12.10 # Make sure to update the license in pyproject.toml for this. +CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this. colorama==0.4.4 distlib==0.3.3 -distro==1.6.0 +distro==1.7.0 html5lib==1.1 msgpack==1.0.3 packaging==21.3 pep517==0.12.0 -platformdirs==2.4.1 -progress==1.6 -pyparsing==3.0.7 +platformdirs==2.5.2 +pyparsing==3.0.8 requests==2.27.1 certifi==2021.10.08 chardet==4.0.0 idna==3.3 - urllib3==1.26.8 -rich==11.0.0 + urllib3==1.26.9 +rich==12.2.0 pygments==2.11.2 - typing_extensions==4.0.1 + typing_extensions==4.2.0 resolvelib==0.8.1 setuptools==44.0.0 six==1.16.0 tenacity==8.0.1 -tomli==1.0.3 +tomli==2.0.1 webencodings==0.5.1 diff --git a/pipenv/patched/notpip/_vendor/webencodings/LICENSE b/pipenv/patched/notpip/_vendor/webencodings/LICENSE new file mode 100644 index 0000000000..3d0d3e7059 --- /dev/null +++ b/pipenv/patched/notpip/_vendor/webencodings/LICENSE @@ -0,0 +1,31 @@ +Copyright (c) 2012 by Simon Sapin. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/pipenv/patched/notpip/appdirs.LICENSE.txt b/pipenv/patched/notpip/appdirs.LICENSE.txt deleted file mode 100644 index 107c61405e..0000000000 --- a/pipenv/patched/notpip/appdirs.LICENSE.txt +++ /dev/null @@ -1,23 +0,0 @@ -# This is the MIT license - -Copyright (c) 2010 ActiveState Software Inc. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/pipenv/patched/patched.txt b/pipenv/patched/patched.txt index d6b9d2d2a8..16b73837be 100644 --- a/pipenv/patched/patched.txt +++ b/pipenv/patched/patched.txt @@ -1,4 +1,4 @@ crayons==0.1.2 -pip==22.0.4 +pip==22.1.2 pipfile==0.0.2 safety==1.10.3 diff --git a/pipenv/patched/safety/safety.py b/pipenv/patched/safety/safety.py index f846441f4d..7002da2fb4 100644 --- a/pipenv/patched/safety/safety.py +++ b/pipenv/patched/safety/safety.py @@ -5,7 +5,7 @@ import time from collections import namedtuple -import pipenv.patched.notpip._vendor.requests as requests +import requests from pipenv.vendor.packaging.specifiers import SpecifierSet from .constants import (API_MIRRORS, CACHE_FILE, CACHE_LICENSES_VALID_SECONDS, diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index 2190819a13..ba8d49d9b6 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -15,9 +15,6 @@ import requests from urllib3.util import parse_url as urllib3_parse -import pipenv.vendor.parse as parse -from pipenv.vendor.vistir.contextmanagers import open_file - TASK_NAME = "update" LIBRARY_DIRNAMES = { @@ -71,7 +68,6 @@ LIBRARY_RENAMES = { "pip": "pipenv.patched.notpip", "functools32": "pipenv.vendor.backports.functools_lru_cache", - "requests": "pipenv.patched.notpip._vendor.requests", } GLOBAL_REPLACEMENT = [ @@ -508,6 +504,8 @@ def download_licenses( only=False, patched=False, ): + import pipenv.vendor.parse as parse + log("Downloading licenses") if not vendor_dir: if patched: @@ -743,9 +741,14 @@ def main(ctx, package=None, type=None): else: target_dirs = [vendor_dir, patched_dir] if package: - log("Using vendor dir: %s" % vendor_dir) - vendor(ctx, vendor_dir, package=package) - download_licenses(ctx, vendor_dir, package=package) + if type is None or type == "vendor": + log("Using vendor dir: %s" % vendor_dir) + vendor(ctx, vendor_dir, package=package) + download_licenses(ctx, vendor_dir, package=package) + elif type == "patched": + log("Using patched dir: %s" % patched_dir) + vendor(ctx, patched_dir, package=package) + download_licenses(ctx, patched_dir, package=package) log("Vendored %s" % package) return for package_dir in target_dirs: @@ -770,6 
+773,8 @@ def install_yaml(ctx): @invoke.task def vendor_artifact(ctx, package, version=None): + from pipenv.vendor.vistir.contextmanagers import open_file + simple = requests.get(f"https://pypi.org/simple/{package}/") pkg_str = f"{package}-{version}" soup = bs4.BeautifulSoup(simple.content) diff --git a/tasks/vendoring/patches/patched/_post_pip_import.patch b/tasks/vendoring/patches/patched/_post_pip_import.patch index 2f6ae7d037..34d9790676 100644 --- a/tasks/vendoring/patches/patched/_post_pip_import.patch +++ b/tasks/vendoring/patches/patched/_post_pip_import.patch @@ -8,7 +8,7 @@ index 204a8ca2..546caab1 100644 ) + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))) from pipenv.patched.notpip._internal.cli.main import main as _main - + sys.exit(_main()) diff --git a/pipenv/patched/notpip/_internal/resolution/resolvelib/candidates.py b/pipenv/patched/notpip/_internal/resolution/resolvelib/candidates.py index 0ba06c52..6fdb59b7 100644 @@ -17,33 +17,33 @@ index 0ba06c52..6fdb59b7 100644 @@ -2,6 +2,7 @@ import logging import sys from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast - + +from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet from pipenv.patched.notpip._vendor.packaging.utils import NormalizedName, canonicalize_name from pipenv.patched.notpip._vendor.packaging.version import Version @@ -253,7 +253,10 @@ class _InstallRequirementBackedCandidate(Candidate): yield self._factory.make_requires_python_requirement(self.dist.requires_python) - + def get_install_requirement(self) -> Optional[InstallRequirement]: - return self._ireq + ireq = self._ireq + if self._version and ireq.req and not ireq.req.url: + ireq.req.specifier = SpecifierSet(f"=={self._version}") + return ireq - - + + class LinkCandidate(_InstallRequirementBackedCandidate): diff --git a/pipenv/patched/notpip/_internal/build_env.py b/pipenv/patched/notpip/_internal/build_env.py index 05457c5a..d8c66b3f 100644 --- a/pipenv/patched/notpip/_internal/build_env.py +++ b/pipenv/patched/notpip/_internal/build_env.py -@@ -17,7 +17,7 @@ from pipenv.patched.notpip._vendor.certifi import where +@@ -16,7 +16,7 @@ from pipenv.patched.notpip._vendor.certifi import where from pipenv.patched.notpip._vendor.packaging.requirements import Requirement from pipenv.patched.notpip._vendor.packaging.version import Version - + -from pipenv.patched.notpip import __file__ as pip_location +from pip import __file__ as pip_location from pipenv.patched.notpip._internal.cli.spinners import open_spinner from pipenv.patched.notpip._internal.locations import get_platlib, get_prefixed_libs, get_purelib - from pipenv.patched.notpip._internal.metadata import get_environment + from pipenv.patched.notpip._internal.metadata import get_default_environment, get_environment diff --git a/tasks/vendoring/patches/patched/pip22.patch b/tasks/vendoring/patches/patched/pip22.patch index 4823626608..e69e007097 100644 --- a/tasks/vendoring/patches/patched/pip22.patch +++ b/tasks/vendoring/patches/patched/pip22.patch @@ -117,10 +117,10 @@ index c72f24f30..df5defb16 100644 "Show help for commands.", ), diff --git a/pipenv/patched/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py -index 223d06df6..dd5283c7f 100644 +index f70f74b17..dbc440be2 100644 --- a/pipenv/patched/pip/_internal/index/package_finder.py +++ b/pipenv/patched/pip/_internal/index/package_finder.py -@@ -114,6 +114,7 @@ class LinkEvaluator: +@@ -125,6 +125,7 @@ class 
LinkEvaluator: target_python: TargetPython, allow_yanked: bool, ignore_requires_python: Optional[bool] = None, @@ -128,7 +128,7 @@ index 223d06df6..dd5283c7f 100644 ) -> None: """ :param project_name: The user supplied package name. -@@ -131,9 +132,13 @@ class LinkEvaluator: +@@ -142,6 +143,8 @@ class LinkEvaluator: :param ignore_requires_python: Whether to ignore incompatible PEP 503 "data-requires-python" values in HTML links. Defaults to False. @@ -137,51 +137,46 @@ index 223d06df6..dd5283c7f 100644 """ if ignore_requires_python is None: ignore_requires_python = False -+ if ignore_compatibility is None: -+ ignore_compatibility = True - - self._allow_yanked = allow_yanked - self._canonical_name = canonical_name -@@ -142,6 +147,7 @@ class LinkEvaluator: +@@ -151,6 +154,7 @@ class LinkEvaluator: + self._ignore_requires_python = ignore_requires_python + self._formats = formats self._target_python = target_python ++ self._ignore_compatibility = ignore_compatibility self.project_name = project_name -+ self._ignore_compatibility = ignore_compatibility - def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: - """ -@@ -166,10 +172,10 @@ class LinkEvaluator: - return (False, "not a file") - if ext not in SUPPORTED_EXTENSIONS: - return (False, f"unsupported archive format: {ext}") +@@ -181,10 +185,10 @@ class LinkEvaluator: + LinkType.format_unsupported, + f"unsupported archive format: {ext}", + ) - if "binary" not in self._formats and ext == WHEEL_EXTENSION: + if "binary" not in self._formats and ext == WHEEL_EXTENSION and not self._ignore_compatibility: - reason = "No binaries permitted for {}".format(self.project_name) - return (False, reason) + reason = f"No binaries permitted for {self.project_name}" + return (LinkType.format_unsupported, reason) - if "macosx10" in link.path and ext == ".zip": -+ if "macosx10" in link.path and ext == '.zip' and not self._ignore_compatibility: - return (False, "macosx10 one") ++ if "macosx10" in link.path and ext == ".zip" and not self._ignore_compatibility: + return (LinkType.format_unsupported, "macosx10 one") if ext == WHEEL_EXTENSION: try: -@@ -181,7 +187,7 @@ class LinkEvaluator: - return (False, reason) +@@ -199,7 +203,7 @@ class LinkEvaluator: + return (LinkType.different_project, reason) supported_tags = self._target_python.get_tags() - if not wheel.supported(supported_tags): + if not wheel.supported(supported_tags) and not self._ignore_compatibility: # Include the wheel's tags in the reason string to # simplify troubleshooting compatibility issues. - file_tags = wheel.get_formatted_file_tags() -@@ -221,7 +227,7 @@ class LinkEvaluator: + file_tags = ", ".join(wheel.get_formatted_file_tags()) +@@ -240,7 +244,7 @@ class LinkEvaluator: version_info=self._target_python.py_version_info, ignore_requires_python=self._ignore_requires_python, ) - if not supports_python: + if not supports_python and not self._ignore_compatibility: - # Return None for the reason text to suppress calling - # _log_skipped_link(). - return (False, None) -@@ -469,7 +475,10 @@ class CandidateEvaluator: + reason = f"{version} Requires-Python {link.requires_python}" + return (LinkType.requires_python_mismatch, reason) + +@@ -487,7 +491,10 @@ class CandidateEvaluator: return sorted(filtered_applicable_candidates, key=self._sort_key) @@ -193,7 +188,7 @@ index 223d06df6..dd5283c7f 100644 """ Function to pass as the `key` argument to a call to sorted() to sort InstallationCandidates by preference. 
-@@ -514,10 +523,13 @@ class CandidateEvaluator: +@@ -532,10 +539,13 @@ class CandidateEvaluator: ) ) except ValueError: @@ -211,7 +206,7 @@ index 223d06df6..dd5283c7f 100644 if self._prefer_binary: binary_preference = 1 if wheel.build_tag is not None: -@@ -584,6 +596,7 @@ class PackageFinder: +@@ -602,6 +612,7 @@ class PackageFinder: format_control: Optional[FormatControl] = None, candidate_prefs: Optional[CandidatePreferences] = None, ignore_requires_python: Optional[bool] = None, @@ -219,7 +214,7 @@ index 223d06df6..dd5283c7f 100644 ) -> None: """ This constructor is primarily meant to be used by the create() class -@@ -605,6 +618,7 @@ class PackageFinder: +@@ -623,6 +634,7 @@ class PackageFinder: self._ignore_requires_python = ignore_requires_python self._link_collector = link_collector self._target_python = target_python @@ -227,7 +222,7 @@ index 223d06df6..dd5283c7f 100644 self._use_deprecated_html5lib = use_deprecated_html5lib self.format_control = format_control -@@ -701,6 +715,7 @@ class PackageFinder: +@@ -727,6 +739,7 @@ class PackageFinder: target_python=self._target_python, allow_yanked=self._allow_yanked, ignore_requires_python=self._ignore_requires_python, @@ -235,6 +230,7 @@ index 223d06df6..dd5283c7f 100644 ) def _sort_links(self, links: Iterable[Link]) -> List[Link]: + diff --git a/pipenv/patched/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 02dbda194..72ab5c1dc 100644 --- a/pipenv/patched/pip/_internal/req/req_install.py diff --git a/tasks/vendoring/patches/patched/pip_index_safety.patch b/tasks/vendoring/patches/patched/pip_index_safety.patch index 7f79b103e6..8947fa5c9d 100644 --- a/tasks/vendoring/patches/patched/pip_index_safety.patch +++ b/tasks/vendoring/patches/patched/pip_index_safety.patch @@ -38,6 +38,18 @@ index e6e9469af..fe8b8c19d 100644 return link_collector diff --git a/pipenv/patched/pip/_internal/models/search_scope.py b/pipenv/patched/pip/_internal/models/search_scope.py +index e4e54c2f4..23baf19ed 100644 +--- a/pipenv/patched/pip/_internal/models/search_scope.py ++++ b/pipenv/patched/pip/_internal/models/search_scope.py +@@ -3,7 +3,7 @@ import logging + import os + import posixpath + import urllib.parse +-from typing import List ++from typing import Dict, List, Optional + + from pip._vendor.packaging.utils import canonicalize_name + index e4e54c2f4..8cf4f1e6e 100644 --- a/pipenv/patched/pip/_internal/models/search_scope.py +++ b/pipenv/patched/pip/_internal/models/search_scope.py From 031164b7d3ceedf506e56ed4f47c5e50664577b3 Mon Sep 17 00:00:00 2001 From: Matt Davis Date: Wed, 6 Jul 2022 22:53:38 -0400 Subject: [PATCH 2/4] new pip uses build_tracker and req_tracker is gone. 
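pip 22.1 replaced the requirement tracker with a build tracker, so the
shim name and the keyword argument change with it. A minimal sketch of
the new calling convention, assuming the vendored pip-shims resolves
``get_build_tracker`` exactly as the hunk below imports it (the print is
illustrative only):

    from pipenv.vendor.pip_shims.shims import (
        get_build_tracker,  # replaces get_requirement_tracker
        global_tempdir_manager,
    )

    # Entering the context yields the renamed tracker object; helpers
    # such as make_requirement_preparer() now take it via the renamed
    # ``build_tracker=`` keyword instead of ``req_tracker=``.
    with global_tempdir_manager(), get_build_tracker() as build_tracker:
        print(type(build_tracker).__name__)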
--- pipenv/utils/resolver.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pipenv/utils/resolver.py b/pipenv/utils/resolver.py index b860d0f695..1037340b26 100644 --- a/pipenv/utils/resolver.py +++ b/pipenv/utils/resolver.py @@ -632,11 +632,11 @@ def constraints(self): def get_resolver(self, clear=False): from pipenv.vendor.pip_shims.shims import ( WheelCache, - get_requirement_tracker, + get_build_tracker, global_tempdir_manager, ) - with global_tempdir_manager(), get_requirement_tracker() as req_tracker, TemporaryDirectory( + with global_tempdir_manager(), get_build_tracker() as build_tracker, TemporaryDirectory( suffix="-build", prefix="pipenv-" ) as directory: pip_options = self.pip_options @@ -646,7 +646,7 @@ def get_resolver(self, clear=False): preparer = self.pip_command.make_requirement_preparer( temp_build_dir=directory, options=pip_options, - req_tracker=req_tracker, + build_tracker=build_tracker, session=self.session, finder=finder, use_user_site=False, From cf4adb61460764d2f1eeaf1e4057ea8ac81a8bd2 Mon Sep 17 00:00:00 2001 From: Matt Davis Date: Thu, 7 Jul 2022 06:51:49 -0400 Subject: [PATCH 3/4] Add news fragment and fix rewrite import list to include requests. --- news/5147.vendor.rst | 1 + pipenv/patched/safety/safety.py | 2 +- pipenv/vendor/pip_shims/compat.py | 42 +++++++++++++++---------------- pipenv/vendor/pip_shims/models.py | 2 +- pipenv/vendor/pip_shims/shims.py | 2 +- pipenv/vendor/pip_shims/utils.py | 10 ++++---- tasks/vendoring/__init__.py | 1 + 7 files changed, 31 insertions(+), 29 deletions(-) create mode 100644 news/5147.vendor.rst diff --git a/news/5147.vendor.rst b/news/5147.vendor.rst new file mode 100644 index 0000000000..1f4ebd6ae1 --- /dev/null +++ b/news/5147.vendor.rst @@ -0,0 +1 @@ +Vendor in latest version of ``pip==22.1.2``. diff --git a/pipenv/patched/safety/safety.py b/pipenv/patched/safety/safety.py index 7002da2fb4..f846441f4d 100644 --- a/pipenv/patched/safety/safety.py +++ b/pipenv/patched/safety/safety.py @@ -5,7 +5,7 @@ import time from collections import namedtuple -import requests +import pipenv.patched.notpip._vendor.requests as requests from pipenv.vendor.packaging.specifiers import SpecifierSet from .constants import (API_MIRRORS, CACHE_FILE, CACHE_LICENSES_VALID_SECONDS, diff --git a/pipenv/vendor/pip_shims/compat.py b/pipenv/vendor/pip_shims/compat.py index fdc67aed13..34722d719d 100644 --- a/pipenv/vendor/pip_shims/compat.py +++ b/pipenv/vendor/pip_shims/compat.py @@ -11,7 +11,7 @@ import sys import types -from packaging import specifiers +from pipenv.vendor.packaging import specifiers from .environment import MYPY_RUNNING from .utils import ( @@ -48,7 +48,7 @@ Union, ) - from requests import Session + from pipenv.patched.notpip._vendor.requests import Session from .utils import TShim, TShimmedFunc, TShimmedPath @@ -380,7 +380,7 @@ def partial_command(shimmed_path, cmd_mapping=None): """ Maps a default set of arguments across all members of a :class:`~pip_shims.models.ShimmedPath` instance, specifically for - :class:`~pip._internal.command.Command` instances which need + :class:`~pipenv.patched.notpip._internal.command.Command` instances which need `summary` and `name` arguments. 
:param :class:`~pip_shims.models.ShimmedPath` shimmed_path: A @@ -506,7 +506,7 @@ def get_requirement_set( :param :class:`~pip_shims.models.ShimmedPathCollection` wheel_cache_provider: A context manager provider which resolves to a `WheelCache` instance - :param install_command: A :class:`~pip._internal.commands.install.InstallCommand` + :param install_command: A :class:`~pipenv.patched.notpip._internal.commands.install.InstallCommand` instance which is used to generate the finder. :param :class:`~pip_shims.models.ShimmedPathCollection` req_set_provider: A provider to build requirement set instances. @@ -543,7 +543,7 @@ def get_requirement_set( :param install_cmd_provider: A shim for providing new install command instances. :type install_cmd_provider: :class:`~pip_shims.models.ShimmedPathCollection` :return: A new requirement set instance - :rtype: :class:`~pip._internal.req.req_set.RequirementSet` + :rtype: :class:`~pipenv.patched.notpip._internal.req.req_set.RequirementSet` """ wheel_cache_provider = resolve_possible_shim(wheel_cache_provider) req_set_provider = resolve_possible_shim(req_set_provider) @@ -598,13 +598,13 @@ def get_package_finder( # type: (...) -> TFinder """Shim for compatibility to generate package finders. - Build and return a :class:`~pip._internal.index.package_finder.PackageFinder` - instance using the :class:`~pip._internal.commands.install.InstallCommand` helper + Build and return a :class:`~pipenv.patched.notpip._internal.index.package_finder.PackageFinder` + instance using the :class:`~pipenv.patched.notpip._internal.commands.install.InstallCommand` helper method to construct the finder, shimmed with backports as needed for compatibility. :param install_cmd_provider: A shim for providing new install command instances. :type install_cmd_provider: :class:`~pip_shims.models.ShimmedPathCollection` - :param install_cmd: A :class:`~pip._internal.commands.install.InstallCommand` + :param install_cmd: A :class:`~pipenv.patched.notpip._internal.commands.install.InstallCommand` instance which is used to generate the finder. :param optparse.Values options: An optional :class:`optparse.Values` instance generated by calling `install_cmd.parser.parse_args()` typically. @@ -615,15 +615,15 @@ def get_package_finder( :param Optional[str] abi: The target abi to support, e.g. "cp38" :param Optional[str] implementation: An optional implementation string for limiting searches to a specific implementation, e.g. "cp" or "py" - :param target_python: A :class:`~pip._internal.models.target_python.TargetPython` + :param target_python: A :class:`~pipenv.patched.notpip._internal.models.target_python.TargetPython` instance (will be translated to alternate arguments if necessary on incompatible pip versions). :param Optional[bool] ignore_requires_python: Whether to ignore `requires_python` on resulting candidates, only valid after pip version 19.3.1 :param target_python_builder: A 'TargetPython' builder (e.g. the class itself, uninstantiated) - :return: A :class:`pip._internal.index.package_finder.PackageFinder` instance - :rtype: :class:`pip._internal.index.package_finder.PackageFinder` + :return: A :class:`pipenv.patched.notpip._internal.index.package_finder.PackageFinder` instance + :rtype: :class:`pipenv.patched.notpip._internal.index.package_finder.PackageFinder` :Example: @@ -719,7 +719,7 @@ def shim_unpack( # (...) 
-> None """ Accepts all parameters that have been valid to pass - to :func:`pip._internal.download.unpack_url` and selects or + to :func:`pipenv.patched.notpip._internal.download.unpack_url` and selects or drops parameters as needed before invoking the provided callable. @@ -728,9 +728,9 @@ def shim_unpack( :param str download_dir: The directory to download the file to :param TShimmedFunc tempdir_manager_provider: A callable or shim referring to `global_tempdir_manager` function from pip or a shimmed no-op context manager - :param Optional[:class:`~pip._internal.req.req_install.InstallRequirement`] ireq: + :param Optional[:class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement`] ireq: an Install Requirement instance, defaults to None - :param Optional[:class:`~pip._internal.models.link.Link`] link: A Link instance, + :param Optional[:class:`~pipenv.patched.notpip._internal.models.link.Link`] link: A Link instance, defaults to None. :param Optional[str] location: A location or source directory if the target is a VCS url, defaults to None. @@ -872,7 +872,7 @@ def make_preparer( the finder, session, and options if needed, defaults to None :param Optional[TShimmedFunc] finder_provider: A package finder provider :yield: A new requirement preparer instance - :rtype: ContextManager[:class:`~pip._internal.operations.prepare.RequirementPreparer`] + :rtype: ContextManager[:class:`~pipenv.patched.notpip._internal.operations.prepare.RequirementPreparer`] :Example: @@ -889,7 +889,7 @@ def make_preparer( ... options=pip_options, finder=finder, session=session, install_cmd=ic ... ) as preparer: ... print(preparer) - + """ preparer_fn = resolve_possible_shim(preparer_fn) downloader_provider = resolve_possible_shim(downloader_provider) @@ -1068,7 +1068,7 @@ def get_resolver( the finder, session, and options if needed, defaults to None. :param bool use_pep517: Whether to use the pep517 build process. :return: A new resolver instance. - :rtype: :class:`~pip._internal.legacy_resolve.Resolver` + :rtype: :class:`~pipenv.patched.notpip._internal.legacy_resolve.Resolver` :Example: @@ -1217,7 +1217,7 @@ def resolve( # noqa:C901 Maps a dictionary of names to corresponding ``InstallRequirement`` values. - :param :class:`~pip._internal.req.req_install.InstallRequirement` ireq: An + :param :class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement` ireq: An InstallRequirement to initiate the resolution process :param :class:`~pip_shims.models.ShimmedPathCollection` reqset_provider: A provider to build requirement set instances. @@ -1241,7 +1241,7 @@ def resolve( # noqa:C901 :param Optional[Values] options: Pip options to use if needed, defaults to None :param Optional[TSession] session: Existing session to use for getting requirements, defaults to None - :param :class:`~pip._internal.legacy_resolve.Resolver` resolver: A pre-existing + :param :class:`~pipenv.patched.notpip._internal.legacy_resolve.Resolver` resolver: A pre-existing resolver instance to use for resolution :param Optional[TFinder] finder: The package finder to use during resolution, defaults to None. @@ -1274,8 +1274,8 @@ def resolve( # noqa:C901 :param bool check_supported_wheels: Whether to check support of wheels before including them in resolution. 
:return: A dictionary mapping requirements to corresponding - :class:`~pip._internal.req.req_install.InstallRequirement`s - :rtype: :class:`~pip._internal.req.req_install.InstallRequirement` + :class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement`s + :rtype: :class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement` :Example: diff --git a/pipenv/vendor/pip_shims/models.py b/pipenv/vendor/pip_shims/models.py index 0ed63c3818..049e385ce2 100644 --- a/pipenv/vendor/pip_shims/models.py +++ b/pipenv/vendor/pip_shims/models.py @@ -76,7 +76,7 @@ def __init__( version, round_prereleases_up=True, base_import_path=None, - vendor_import_path="pip._vendor", + vendor_import_path="pipenv.patched.notpip._vendor", ): # type: (str, bool, Optional[str], str) -> None self.version = version diff --git a/pipenv/vendor/pip_shims/shims.py b/pipenv/vendor/pip_shims/shims.py index fb02937835..48f6c4324c 100644 --- a/pipenv/vendor/pip_shims/shims.py +++ b/pipenv/vendor/pip_shims/shims.py @@ -7,7 +7,7 @@ import sys import types -from packaging.version import parse as parse_version +from pipenv.vendor.packaging.version import parse as parse_version from .models import ( ShimmedPathCollection, diff --git a/pipenv/vendor/pip_shims/utils.py b/pipenv/vendor/pip_shims/utils.py index ec32082df5..2a31a4dfe6 100644 --- a/pipenv/vendor/pip_shims/utils.py +++ b/pipenv/vendor/pip_shims/utils.py @@ -130,10 +130,10 @@ def split_package(module, subimport=None): :Example: >>> from pip_shims.utils import split_package - >>> split_package("pip._internal.req.req_install", subimport="InstallRequirement") - ("pip._internal.req.req_install", "InstallRequirement") - >>> split_package("pip._internal.cli.base_command") - ("pip._internal.cli", "base_command") + >>> split_package("pipenv.patched.notpip._internal.req.req_install", subimport="InstallRequirement") + ("pipenv.patched.notpip._internal.req.req_install", "InstallRequirement") + >>> split_package("pipenv.patched.notpip._internal.cli.base_command") + ("pipenv.patched.notpip._internal.cli", "base_command") """ package = None if subimport: @@ -387,7 +387,7 @@ def get_allowed_args(fn_or_class): try: signature = inspect.signature(fn_or_class) except AttributeError: - import funcsigs + import pipenv.vendor.funcsigs as funcsigs signature = funcsigs.signature(fn_or_class) args = [] diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py index ba8d49d9b6..063b2bf671 100644 --- a/tasks/vendoring/__init__.py +++ b/tasks/vendoring/__init__.py @@ -68,6 +68,7 @@ LIBRARY_RENAMES = { "pip": "pipenv.patched.notpip", "functools32": "pipenv.vendor.backports.functools_lru_cache", + "requests": "pipenv.patched.notpip._vendor.requests", } GLOBAL_REPLACEMENT = [ From 8a4d2eb130fd173466310f59df607ea59bfc44a5 Mon Sep 17 00:00:00 2001 From: Matt Davis Date: Thu, 7 Jul 2022 09:21:57 -0400 Subject: [PATCH 4/4] Vendor in latest requirements lib and pip-shims in order to drop packaging and resolve differences in sourcing it. 
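With ``packaging`` gone from pipenv/vendor, every import of it is
rewritten to point at the copy vendored inside the patched pip, and the
pip-shims default vendor_import_path moves the same way. The rewrite is
a pure path change; as a small illustration of the pattern repeated in
the hunks below (the package name here is arbitrary):

    # Before (pipenv's own vendored copy, removed by this commit):
    #     from pipenv.vendor.packaging.utils import canonicalize_name
    # After (the copy shipped inside the patched pip):
    from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name

    print(canonicalize_name("Django_REST-framework"))  # django-rest-framework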
--- news/5147.vendor.rst | 5 +- pipenv/core.py | 12 +- pipenv/environment.py | 8 +- pipenv/patched/safety/safety.py | 2 +- pipenv/patched/safety/util.py | 4 +- pipenv/resolver.py | 8 +- pipenv/utils/dependencies.py | 6 +- pipenv/vendor/dparse/parser.py | 4 +- pipenv/vendor/packaging/LICENSE | 3 - pipenv/vendor/packaging/LICENSE.APACHE | 177 ---- pipenv/vendor/packaging/LICENSE.BSD | 23 - pipenv/vendor/packaging/__about__.py | 26 - pipenv/vendor/packaging/__init__.py | 25 - pipenv/vendor/packaging/_manylinux.py | 301 ------- pipenv/vendor/packaging/_musllinux.py | 136 --- pipenv/vendor/packaging/_structures.py | 61 -- pipenv/vendor/packaging/markers.py | 304 ------- pipenv/vendor/packaging/py.typed | 0 pipenv/vendor/packaging/requirements.py | 146 ---- pipenv/vendor/packaging/specifiers.py | 802 ------------------ pipenv/vendor/packaging/tags.py | 487 ----------- pipenv/vendor/packaging/utils.py | 136 --- pipenv/vendor/packaging/version.py | 504 ----------- pipenv/vendor/pip_shims/__init__.py | 2 +- pipenv/vendor/pip_shims/compat.py | 42 +- pipenv/vendor/pip_shims/models.py | 6 +- pipenv/vendor/pip_shims/shims.py | 2 +- pipenv/vendor/pip_shims/utils.py | 8 +- pipenv/vendor/pythonfinder/models/python.py | 2 +- pipenv/vendor/pythonfinder/utils.py | 4 +- pipenv/vendor/requirementslib/__init__.py | 2 +- pipenv/vendor/requirementslib/models/cache.py | 2 +- .../requirementslib/models/dependencies.py | 19 +- .../vendor/requirementslib/models/markers.py | 8 +- .../vendor/requirementslib/models/metadata.py | 14 +- .../vendor/requirementslib/models/project.py | 21 +- .../requirementslib/models/requirements.py | 8 +- .../requirementslib/models/setup_info.py | 31 +- pipenv/vendor/requirementslib/models/utils.py | 22 +- pipenv/vendor/requirementslib/utils.py | 4 +- pipenv/vendor/vendor.txt | 5 +- .../wheel/vendored/packaging/_typing.py | 2 +- tasks/vendoring/__init__.py | 1 + 43 files changed, 127 insertions(+), 3258 deletions(-) delete mode 100644 pipenv/vendor/packaging/LICENSE delete mode 100644 pipenv/vendor/packaging/LICENSE.APACHE delete mode 100644 pipenv/vendor/packaging/LICENSE.BSD delete mode 100644 pipenv/vendor/packaging/__about__.py delete mode 100644 pipenv/vendor/packaging/__init__.py delete mode 100644 pipenv/vendor/packaging/_manylinux.py delete mode 100644 pipenv/vendor/packaging/_musllinux.py delete mode 100644 pipenv/vendor/packaging/_structures.py delete mode 100644 pipenv/vendor/packaging/markers.py delete mode 100644 pipenv/vendor/packaging/py.typed delete mode 100644 pipenv/vendor/packaging/requirements.py delete mode 100644 pipenv/vendor/packaging/specifiers.py delete mode 100644 pipenv/vendor/packaging/tags.py delete mode 100644 pipenv/vendor/packaging/utils.py delete mode 100644 pipenv/vendor/packaging/version.py diff --git a/news/5147.vendor.rst b/news/5147.vendor.rst index 1f4ebd6ae1..ea5973a876 100644 --- a/news/5147.vendor.rst +++ b/news/5147.vendor.rst @@ -1 +1,4 @@ -Vendor in latest version of ``pip==22.1.2``. +Vendor in latest version of ``pip==22.1.2`` which upgrades ``pipenv`` from ``pip==22.0.4``. +Vendor in latest version of ``requirementslib==1.6.7`` which includes a fix for tracebacks on encountering Annotated variables. +Vendor in latest version of ``pip-shims==0.7.3`` such that imports could be rewritten to utilize ``packaging`` from vendor'd ``pip``. +Drop the ``packaging`` requirement from the ``vendor`` directory in ``pipenv``. 
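[Review note: the file diffs that follow all apply the same import
rewrite; a quick behavioral sanity check, assuming the vendored tree
produced by this series, is:

    from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet

    # Only the import path changed; SpecifierSet semantics are identical.
    spec = SpecifierSet("==22.1.2")
    print("22.1.2" in spec)  # True
    print("22.0.4" in spec)  # False
]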
diff --git a/pipenv/core.py b/pipenv/core.py index 18d4cd5b01..99d31e4545 100644 --- a/pipenv/core.py +++ b/pipenv/core.py @@ -1138,7 +1138,7 @@ def do_lock( # Support for --keep-outdated... if keep_outdated: - from pipenv.vendor.packaging.utils import canonicalize_name + from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name for section_name, section in ( ("default", project.packages), @@ -1370,7 +1370,7 @@ def get_pip_args( selective_upgrade: bool = False, src_dir: Optional[str] = None, ) -> List[str]: - from .vendor.packaging.version import parse as parse_version + from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version arg_map = { "pre": ["--pre"], @@ -1846,7 +1846,8 @@ def do_outdated(project, pypi_mirror=None, pre=False, clear=False): from collections import namedtuple from collections.abc import Mapping - from .vendor.packaging.utils import canonicalize_name + from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name + from .vendor.requirementslib.models.requirements import Requirement from .vendor.requirementslib.models.utils import get_version @@ -2284,7 +2285,8 @@ def do_uninstall( pypi_mirror=None, ctx=None, ): - from .vendor.packaging.utils import canonicalize_name + from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name + from .vendor.requirementslib.models.requirements import Requirement # Automatically use an activated virtualenv. @@ -2994,7 +2996,7 @@ def do_clean( system=False, ): # Ensure that virtualenv is available. - from packaging.utils import canonicalize_name + from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name ensure_project( project, three=three, python=python, validate=False, pypi_mirror=pypi_mirror diff --git a/pipenv/environment.py b/pipenv/environment.py index bcaec10cfb..87b3c7e111 100644 --- a/pipenv/environment.py +++ b/pipenv/environment.py @@ -15,12 +15,12 @@ import pipenv from pipenv.environments import is_type_checking +from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name from pipenv.utils.indexes import prepare_pip_source_args from pipenv.utils.processes import subprocess_run from pipenv.utils.shell import make_posix, normalize_path from pipenv.vendor import click, vistir from pipenv.vendor.cached_property import cached_property -from pipenv.vendor.packaging.utils import canonicalize_name if is_type_checking(): from types import ModuleType @@ -29,8 +29,8 @@ import pip_shims.shims import tomlkit + from pipenv.patched.notpip._vendor.packaging.version import Version from pipenv.project import Project, TPipfile, TSource - from pipenv.vendor.packaging.version import Version BASE_WORKING_SET = pkg_resources.WorkingSet(sys.path) # TODO: Unittests for this class @@ -545,7 +545,9 @@ def pip_version(self) -> Version: Get the pip version in the environment. Useful for knowing which args we can use when installing. 
""" - from .vendor.packaging.version import parse as parse_version + from pipenv.patched.notpip._vendor.packaging.version import ( + parse as parse_version, + ) pip = next( iter(pkg for pkg in self.get_installed_packages() if pkg.key == "pip"), None diff --git a/pipenv/patched/safety/safety.py b/pipenv/patched/safety/safety.py index f846441f4d..d65fcdb595 100644 --- a/pipenv/patched/safety/safety.py +++ b/pipenv/patched/safety/safety.py @@ -6,7 +6,7 @@ from collections import namedtuple import pipenv.patched.notpip._vendor.requests as requests -from pipenv.vendor.packaging.specifiers import SpecifierSet +from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet from .constants import (API_MIRRORS, CACHE_FILE, CACHE_LICENSES_VALID_SECONDS, CACHE_VALID_SECONDS, OPEN_MIRRORS, REQUEST_TIMEOUT) diff --git a/pipenv/patched/safety/util.py b/pipenv/patched/safety/util.py index a9428054bf..213ea2fa4a 100644 --- a/pipenv/patched/safety/util.py +++ b/pipenv/patched/safety/util.py @@ -1,6 +1,6 @@ from pipenv.vendor.dparse.parser import setuptools_parse_requirements_backport as _parse_requirements from collections import namedtuple -from pipenv.vendor.packaging.version import parse as parse_version +from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version import pipenv.vendor.click as click import sys import json @@ -127,7 +127,7 @@ def get_license_name_by_id(license_id, db): return None def get_packages_licenses(packages, licenses_db): - """Get the licenses for the specified packages based on their version. + """Get the licenses for the specified packages based on their version. :param packages: packages list :param licenses_db: the licenses db in the raw form. diff --git a/pipenv/resolver.py b/pipenv/resolver.py index 647b212de0..2e9163367a 100644 --- a/pipenv/resolver.py +++ b/pipenv/resolver.py @@ -201,7 +201,7 @@ def parse_pyparsing_exprs(cls, expr_iterable): @classmethod def get_markers_from_dict(cls, entry_dict): - from pipenv.vendor.packaging import markers as packaging_markers + from pipenv.patched.notpip._vendor.packaging import markers as packaging_markers from pipenv.vendor.requirementslib.models.markers import normalize_marker_str marker_keys = cls.parse_pyparsing_exprs(packaging_markers.VARIABLE) @@ -347,7 +347,7 @@ def create(cls, name, entry_dict, project, resolver, reverse_deps=None, dev=Fals @staticmethod def clean_specifier(specifier): - from pipenv.vendor.packaging.specifiers import Specifier + from pipenv.patched.notpip._vendor.packaging.specifiers import Specifier if not any(specifier.startswith(k) for k in Specifier._operators.keys()): if specifier.strip().lower() in ["any", "", "*"]: @@ -359,7 +359,7 @@ def clean_specifier(specifier): @staticmethod def strip_version(specifier): - from pipenv.vendor.packaging.specifiers import Specifier + from pipenv.patched.notpip._vendor.packaging.specifiers import Specifier op = next( iter(k for k in Specifier._operators.keys() if specifier.startswith(k)), None @@ -446,7 +446,7 @@ def get_dependency(self, name): return {} def get_parent_deps(self, unnest=False): - from pipenv.vendor.packaging.specifiers import Specifier + from pipenv.patched.notpip._vendor.packaging.specifiers import Specifier parents = [] for spec in self.reverse_deps.get(self.normalized_name, {}).get("parents", set()): diff --git a/pipenv/utils/dependencies.py b/pipenv/utils/dependencies.py index f2348a1c6b..e7731c309c 100644 --- a/pipenv/utils/dependencies.py +++ b/pipenv/utils/dependencies.py @@ -2,7 +2,7 @@ from contextlib 
import contextmanager from typing import Mapping, Sequence -from packaging.markers import Marker +from pipenv.patched.notpip._vendor.packaging.markers import Marker from .constants import SCHEME_LIST, VCS_LIST from .shell import temp_path @@ -49,7 +49,7 @@ def __exit__(self, *args): def get_canonical_names(packages): """Canonicalize a list of packages and return a set of canonical names""" - from pipenv.vendor.packaging.utils import canonicalize_name + from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name if not isinstance(packages, Sequence): if not isinstance(packages, str): @@ -130,7 +130,7 @@ def translate_markers(pipfile_entry): """ if not isinstance(pipfile_entry, Mapping): raise TypeError("Entry is not a pipfile formatted mapping.") - from pipenv.vendor.packaging.markers import default_environment + from pipenv.patched.notpip._vendor.packaging.markers import default_environment allowed_marker_keys = ["markers"] + list(default_environment().keys()) provided_keys = list(pipfile_entry.keys()) if hasattr(pipfile_entry, "keys") else [] diff --git a/pipenv/vendor/dparse/parser.py b/pipenv/vendor/dparse/parser.py index 368a2883e2..dded92c8e1 100644 --- a/pipenv/vendor/dparse/parser.py +++ b/pipenv/vendor/dparse/parser.py @@ -12,10 +12,10 @@ from .regex import URL_REGEX, HASH_REGEX from .dependencies import DependencyFile, Dependency -from pipenv.vendor.packaging.requirements import Requirement as PackagingRequirement, InvalidRequirement +from pipenv.patched.notpip._vendor.packaging.requirements import Requirement as PackagingRequirement, InvalidRequirement from . import filetypes import pipenv.vendor.toml as toml -from pipenv.vendor.packaging.specifiers import SpecifierSet +from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet import json diff --git a/pipenv/vendor/packaging/LICENSE b/pipenv/vendor/packaging/LICENSE deleted file mode 100644 index 6f62d44e4e..0000000000 --- a/pipenv/vendor/packaging/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. diff --git a/pipenv/vendor/packaging/LICENSE.APACHE b/pipenv/vendor/packaging/LICENSE.APACHE deleted file mode 100644 index f433b1a53f..0000000000 --- a/pipenv/vendor/packaging/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. 
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS diff --git a/pipenv/vendor/packaging/LICENSE.BSD b/pipenv/vendor/packaging/LICENSE.BSD deleted file mode 100644 index 42ce7b75c9..0000000000 --- a/pipenv/vendor/packaging/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pipenv/vendor/packaging/__about__.py b/pipenv/vendor/packaging/__about__.py deleted file mode 100644 index 3551bc2d29..0000000000 --- a/pipenv/vendor/packaging/__about__.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "21.3" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014-2019 %s" % __author__ diff --git a/pipenv/vendor/packaging/__init__.py b/pipenv/vendor/packaging/__init__.py deleted file mode 100644 index 3c50c5dcfe..0000000000 --- a/pipenv/vendor/packaging/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from .__about__ import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - __version__, -) - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] diff --git a/pipenv/vendor/packaging/_manylinux.py b/pipenv/vendor/packaging/_manylinux.py deleted file mode 100644 index 4c379aa6f6..0000000000 --- a/pipenv/vendor/packaging/_manylinux.py +++ /dev/null @@ -1,301 +0,0 @@ -import collections -import functools -import os -import re -import struct -import sys -import warnings -from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple - - -# Python does not provide platform information at sufficient granularity to -# identify the architecture of the running executable in some cases, so we -# determine it dynamically by reading the information from the running -# process. This only applies on Linux, which uses the ELF format. -class _ELFFileHeader: - # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header - class _InvalidELFFileHeader(ValueError): - """ - An invalid ELF file header was found. - """ - - ELF_MAGIC_NUMBER = 0x7F454C46 - ELFCLASS32 = 1 - ELFCLASS64 = 2 - ELFDATA2LSB = 1 - ELFDATA2MSB = 2 - EM_386 = 3 - EM_S390 = 22 - EM_ARM = 40 - EM_X86_64 = 62 - EF_ARM_ABIMASK = 0xFF000000 - EF_ARM_ABI_VER5 = 0x05000000 - EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - def __init__(self, file: IO[bytes]) -> None: - def unpack(fmt: str) -> int: - try: - data = file.read(struct.calcsize(fmt)) - result: Tuple[int, ...] 
= struct.unpack(fmt, data) - except struct.error: - raise _ELFFileHeader._InvalidELFFileHeader() - return result[0] - - self.e_ident_magic = unpack(">I") - if self.e_ident_magic != self.ELF_MAGIC_NUMBER: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_class = unpack("B") - if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_data = unpack("B") - if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: - raise _ELFFileHeader._InvalidELFFileHeader() - self.e_ident_version = unpack("B") - self.e_ident_osabi = unpack("B") - self.e_ident_abiversion = unpack("B") - self.e_ident_pad = file.read(7) - format_h = "H" - format_i = "I" - format_q = "Q" - format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q - self.e_type = unpack(format_h) - self.e_machine = unpack(format_h) - self.e_version = unpack(format_i) - self.e_entry = unpack(format_p) - self.e_phoff = unpack(format_p) - self.e_shoff = unpack(format_p) - self.e_flags = unpack(format_i) - self.e_ehsize = unpack(format_h) - self.e_phentsize = unpack(format_h) - self.e_phnum = unpack(format_h) - self.e_shentsize = unpack(format_h) - self.e_shnum = unpack(format_h) - self.e_shstrndx = unpack(format_h) - - -def _get_elf_header() -> Optional[_ELFFileHeader]: - try: - with open(sys.executable, "rb") as f: - elf_header = _ELFFileHeader(f) - except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): - return None - return elf_header - - -def _is_linux_armhf() -> bool: - # hard-float ABI can be detected from the ELF header of the running - # process - # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_ARM - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABIMASK - ) == elf_header.EF_ARM_ABI_VER5 - result &= ( - elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD - ) == elf_header.EF_ARM_ABI_FLOAT_HARD - return result - - -def _is_linux_i686() -> bool: - elf_header = _get_elf_header() - if elf_header is None: - return False - result = elf_header.e_ident_class == elf_header.ELFCLASS32 - result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB - result &= elf_header.e_machine == elf_header.EM_386 - return result - - -def _have_compatible_abi(arch: str) -> bool: - if arch == "armv7l": - return _is_linux_armhf() - if arch == "i686": - return _is_linux_i686() - return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} - - -# If glibc ever changes its major version, we need to know what the last -# minor version was, so we can build the complete list of all versions. -# For now, guess what the highest minor version might be, assume it will -# be 50 for testing. Once this actually happens, update the dictionary -# with the actual value. -_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) - - -class _GLibCVersion(NamedTuple): - major: int - minor: int - - -def _glibc_version_string_confstr() -> Optional[str]: - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module. 
- # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 - try: - # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". - version_string = os.confstr("CS_GNU_LIBC_VERSION") - assert version_string is not None - _, version = version_string.split() - except (AssertionError, AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version - - -def _glibc_version_string_ctypes() -> Optional[str]: - """ - Fallback implementation of glibc_version_string using ctypes. - """ - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - # - # We must also handle the special case where the executable is not a - # dynamically linked executable. This can occur when using musl libc, - # for example. In this situation, dlopen() will error, leading to an - # OSError. Interestingly, at least in the case of musl, there is no - # errno set on the OSError. The single string argument used to construct - # OSError comes from libc itself and is therefore not portable to - # hard code here. In any case, failure to call dlopen() means we - # can proceed, so we bail on our attempt. - try: - process_namespace = ctypes.CDLL(None) - except OSError: - return None - - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str: str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -def _glibc_version_string() -> Optional[str]: - """Returns glibc version string, or None if not using glibc.""" - return _glibc_version_string_confstr() or _glibc_version_string_ctypes() - - -def _parse_glibc_version(version_str: str) -> Tuple[int, int]: - """Parse glibc version. - - We use a regexp instead of str.split because we want to discard any - random junk that might come after the minor version -- this might happen - in patched/forked versions of glibc (e.g. Linaro's version of glibc - uses version strings like "2.20-2014.11"). See gh-3588. - """ - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn( - "Expected glibc version with 2 components major.minor," - " got: %s" % version_str, - RuntimeWarning, - ) - return -1, -1 - return int(m.group("major")), int(m.group("minor")) - - -@functools.lru_cache() -def _get_glibc_version() -> Tuple[int, int]: - version_str = _glibc_version_string() - if version_str is None: - return (-1, -1) - return _parse_glibc_version(version_str) - - -# From PEP 513, PEP 600 -def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: - sys_glibc = _get_glibc_version() - if sys_glibc < version: - return False - # Check for presence of _manylinux module. 
- try: - import _manylinux # noqa - except ImportError: - return True - if hasattr(_manylinux, "manylinux_compatible"): - result = _manylinux.manylinux_compatible(version[0], version[1], arch) - if result is not None: - return bool(result) - return True - if version == _GLibCVersion(2, 5): - if hasattr(_manylinux, "manylinux1_compatible"): - return bool(_manylinux.manylinux1_compatible) - if version == _GLibCVersion(2, 12): - if hasattr(_manylinux, "manylinux2010_compatible"): - return bool(_manylinux.manylinux2010_compatible) - if version == _GLibCVersion(2, 17): - if hasattr(_manylinux, "manylinux2014_compatible"): - return bool(_manylinux.manylinux2014_compatible) - return True - - -_LEGACY_MANYLINUX_MAP = { - # CentOS 7 w/ glibc 2.17 (PEP 599) - (2, 17): "manylinux2014", - # CentOS 6 w/ glibc 2.12 (PEP 571) - (2, 12): "manylinux2010", - # CentOS 5 w/ glibc 2.5 (PEP 513) - (2, 5): "manylinux1", -} - - -def platform_tags(linux: str, arch: str) -> Iterator[str]: - if not _have_compatible_abi(arch): - return - # Oldest glibc to be supported regardless of architecture is (2, 17). - too_old_glibc2 = _GLibCVersion(2, 16) - if arch in {"x86_64", "i686"}: - # On x86/i686 also oldest glibc to be supported is (2, 5). - too_old_glibc2 = _GLibCVersion(2, 4) - current_glibc = _GLibCVersion(*_get_glibc_version()) - glibc_max_list = [current_glibc] - # We can assume compatibility across glibc major versions. - # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 - # - # Build a list of maximum glibc versions so that we can - # output the canonical list of all glibc from current_glibc - # down to too_old_glibc2, including all intermediary versions. - for glibc_major in range(current_glibc.major - 1, 1, -1): - glibc_minor = _LAST_GLIBC_MINOR[glibc_major] - glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) - for glibc_max in glibc_max_list: - if glibc_max.major == too_old_glibc2.major: - min_minor = too_old_glibc2.minor - else: - # For other glibc major versions oldest supported is (x, 0). - min_minor = -1 - for glibc_minor in range(glibc_max.minor, min_minor, -1): - glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) - tag = "manylinux_{}_{}".format(*glibc_version) - if _is_compatible(tag, arch, glibc_version): - yield linux.replace("linux", tag) - # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. - if glibc_version in _LEGACY_MANYLINUX_MAP: - legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] - if _is_compatible(legacy_tag, arch, glibc_version): - yield linux.replace("linux", legacy_tag) diff --git a/pipenv/vendor/packaging/_musllinux.py b/pipenv/vendor/packaging/_musllinux.py deleted file mode 100644 index 8ac3059ba3..0000000000 --- a/pipenv/vendor/packaging/_musllinux.py +++ /dev/null @@ -1,136 +0,0 @@ -"""PEP 656 support. - -This module implements logic to detect if the currently running Python is -linked against musl, and what musl version is used. -""" - -import contextlib -import functools -import operator -import os -import re -import struct -import subprocess -import sys -from typing import IO, Iterator, NamedTuple, Optional, Tuple - - -def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: - return struct.unpack(fmt, f.read(struct.calcsize(fmt))) - - -def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: - """Detect musl libc location by parsing the Python executable. 
- - Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca - ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html - """ - f.seek(0) - try: - ident = _read_unpacked(f, "16B") - except struct.error: - return None - if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. - return None - f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. - - try: - # e_fmt: Format for program header. - # p_fmt: Format for section header. - # p_idx: Indexes to find p_type, p_offset, and p_filesz. - e_fmt, p_fmt, p_idx = { - 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. - 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. - }[ident[4]] - except KeyError: - return None - else: - p_get = operator.itemgetter(*p_idx) - - # Find the interpreter section and return its content. - try: - _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) - except struct.error: - return None - for i in range(e_phnum + 1): - f.seek(e_phoff + e_phentsize * i) - try: - p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) - except struct.error: - return None - if p_type != 3: # Not PT_INTERP. - continue - f.seek(p_offset) - interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") - if "musl" not in interpreter: - return None - return interpreter - return None - - -class _MuslVersion(NamedTuple): - major: int - minor: int - - -def _parse_musl_version(output: str) -> Optional[_MuslVersion]: - lines = [n for n in (n.strip() for n in output.splitlines()) if n] - if len(lines) < 2 or lines[0][:4] != "musl": - return None - m = re.match(r"Version (\d+)\.(\d+)", lines[1]) - if not m: - return None - return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) - - -@functools.lru_cache() -def _get_musl_version(executable: str) -> Optional[_MuslVersion]: - """Detect currently-running musl runtime version. - - This is done by checking the specified executable's dynamic linking - information, and invoking the loader to parse its output for a version - string. If the loader is musl, the output would be something like:: - - musl libc (x86_64) - Version 1.2.2 - Dynamic Program Loader - """ - with contextlib.ExitStack() as stack: - try: - f = stack.enter_context(open(executable, "rb")) - except OSError: - return None - ld = _parse_ld_musl_from_elf(f) - if not ld: - return None - proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) - return _parse_musl_version(proc.stderr) - - -def platform_tags(arch: str) -> Iterator[str]: - """Generate musllinux tags compatible to the current platform. - - :param arch: Should be the part of platform tag after the ``linux_`` - prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a - prerequisite for the current platform to be musllinux-compatible. - - :returns: An iterator of compatible musllinux tags. - """ - sys_musl = _get_musl_version(sys.executable) - if sys_musl is None: # Python not dynamically linked against musl. 
- return - for minor in range(sys_musl.minor, -1, -1): - yield f"musllinux_{sys_musl.major}_{minor}_{arch}" - - -if __name__ == "__main__": # pragma: no cover - import sysconfig - - plat = sysconfig.get_platform() - assert plat.startswith("linux-"), "not linux" - - print("plat:", plat) - print("musl:", _get_musl_version(sys.executable)) - print("tags:", end=" ") - for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): - print(t, end="\n ") diff --git a/pipenv/vendor/packaging/_structures.py b/pipenv/vendor/packaging/_structures.py deleted file mode 100644 index 90a6465f96..0000000000 --- a/pipenv/vendor/packaging/_structures.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - - -class InfinityType: - def __repr__(self) -> str: - return "Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return False - - def __le__(self, other: object) -> bool: - return False - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return True - - def __ge__(self, other: object) -> bool: - return True - - def __neg__(self: object) -> "NegativeInfinityType": - return NegativeInfinity - - -Infinity = InfinityType() - - -class NegativeInfinityType: - def __repr__(self) -> str: - return "-Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return True - - def __le__(self, other: object) -> bool: - return True - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return False - - def __ge__(self, other: object) -> bool: - return False - - def __neg__(self: object) -> InfinityType: - return Infinity - - -NegativeInfinity = NegativeInfinityType() diff --git a/pipenv/vendor/packaging/markers.py b/pipenv/vendor/packaging/markers.py deleted file mode 100644 index 86582c84c8..0000000000 --- a/pipenv/vendor/packaging/markers.py +++ /dev/null @@ -1,304 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import operator -import os -import platform -import sys -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -from pipenv.vendor.pyparsing import ( # noqa: N817 - Forward, - Group, - Literal as L, - ParseException, - ParseResults, - QuotedString, - ZeroOrMore, - stringEnd, - stringStart, -) - -from .specifiers import InvalidSpecifier, Specifier - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - -Operator = Callable[[str, str], bool] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. 
- """ - - -class Node: - def __init__(self, value: Any) -> None: - self.value = value - - def __str__(self) -> str: - return str(self.value) - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -VARIABLE = ( - L("implementation_version") - | L("platform_python_implementation") - | L("implementation_name") - | L("python_full_version") - | L("platform_release") - | L("platform_version") - | L("platform_machine") - | L("platform_system") - | L("python_version") - | L("sys_platform") - | L("os_name") - | L("os.name") # PEP-345 - | L("sys.platform") # PEP-345 - | L("platform.version") # PEP-345 - | L("platform.machine") # PEP-345 - | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # undocumented setuptools legacy - | L("extra") # PEP-508 -) -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results - - -def _format_marker( - marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True -) -> str: - - assert isinstance(marker, (list, tuple, str)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. 
- if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators: Dict[str, Operator] = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs: str, op: Op, rhs: str) -> bool: - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs) - - oper: Optional[Operator] = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") - - return oper(lhs, rhs) - - -class Undefined: - pass - - -_undefined = Undefined() - - -def _get_env(environment: Dict[str, str], name: str) -> str: - value: Union[str, Undefined] = environment.get(name, _undefined) - - if isinstance(value, Undefined): - raise UndefinedEnvironmentName( - f"{name!r} does not exist in evaluation environment." - ) - - return value - - -def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: - groups: List[List[bool]] = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, str)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) - rhs_value = rhs.value - else: - lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) - - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info: "sys._version_info") -> str: - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment() -> Dict[str, str]: - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": ".".join(platform.python_version_tuple()[:2]), - "sys_platform": sys.platform, - } - - -class Marker: - def __init__(self, marker: str) -> None: - try: - self._markers = _coerce_parse_result(MARKER.parseString(marker)) - except ParseException as e: - raise InvalidMarker( - f"Invalid marker: {marker!r}, parse error at " - f"{marker[e.loc : e.loc + 8]!r}" - ) - - def __str__(self) -> str: - return _format_marker(self._markers) - - def __repr__(self) -> str: - return f"" - - def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. 
environment is an optional argument to override all or - part of the determined environment. - - The environment is determined from the current Python process. - """ - current_environment = default_environment() - if environment is not None: - current_environment.update(environment) - - return _evaluate_markers(self._markers, current_environment) diff --git a/pipenv/vendor/packaging/py.typed b/pipenv/vendor/packaging/py.typed deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/pipenv/vendor/packaging/requirements.py b/pipenv/vendor/packaging/requirements.py deleted file mode 100644 index 284c2e70e5..0000000000 --- a/pipenv/vendor/packaging/requirements.py +++ /dev/null @@ -1,146 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import re -import string -import urllib.parse -from typing import List, Optional as TOptional, Set - -from pipenv.vendor.pyparsing import ( # noqa - Combine, - Literal as L, - Optional, - ParseException, - Regex, - Word, - ZeroOrMore, - originalTextFor, - stringEnd, - stringStart, -) - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r"[^ ]+")("url") -URL = AT + URI - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine( - VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False -)("_raw_spec") -_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start : t._original_end]) -) -MARKER_SEPARATOR = SEMICOLON -MARKER = MARKER_SEPARATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd -# pyparsing isn't thread safe during initialization, so we do it eagerly, see -# issue #104 -REQUIREMENT.parseString("x[]") - - -class Requirement: - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? 
Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? - - def __init__(self, requirement_string: str) -> None: - try: - req = REQUIREMENT.parseString(requirement_string) - except ParseException as e: - raise InvalidRequirement( - f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}' - ) - - self.name: str = req.name - if req.url: - parsed_url = urllib.parse.urlparse(req.url) - if parsed_url.scheme == "file": - if urllib.parse.urlunparse(parsed_url) != req.url: - raise InvalidRequirement("Invalid URL given") - elif not (parsed_url.scheme and parsed_url.netloc) or ( - not parsed_url.scheme and not parsed_url.netloc - ): - raise InvalidRequirement(f"Invalid URL: {req.url}") - self.url: TOptional[str] = req.url - else: - self.url = None - self.extras: Set[str] = set(req.extras.asList() if req.extras else []) - self.specifier: SpecifierSet = SpecifierSet(req.specifier) - self.marker: TOptional[Marker] = req.marker if req.marker else None - - def __str__(self) -> str: - parts: List[str] = [self.name] - - if self.extras: - formatted_extras = ",".join(sorted(self.extras)) - parts.append(f"[{formatted_extras}]") - - if self.specifier: - parts.append(str(self.specifier)) - - if self.url: - parts.append(f"@ {self.url}") - if self.marker: - parts.append(" ") - - if self.marker: - parts.append(f"; {self.marker}") - - return "".join(parts) - - def __repr__(self) -> str: - return f"" diff --git a/pipenv/vendor/packaging/specifiers.py b/pipenv/vendor/packaging/specifiers.py deleted file mode 100644 index 0e218a6f9f..0000000000 --- a/pipenv/vendor/packaging/specifiers.py +++ /dev/null @@ -1,802 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import abc -import functools -import itertools -import re -import warnings -from typing import ( - Callable, - Dict, - Iterable, - Iterator, - List, - Optional, - Pattern, - Set, - Tuple, - TypeVar, - Union, -) - -from .utils import canonicalize_version -from .version import LegacyVersion, Version, parse - -ParsedVersion = Union[Version, LegacyVersion] -UnparsedVersion = Union[Version, LegacyVersion, str] -VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion) -CallableOperator = Callable[[ParsedVersion, str], bool] - - -class InvalidSpecifier(ValueError): - """ - An invalid specifier was found, users should refer to PEP 440. - """ - - -class BaseSpecifier(metaclass=abc.ABCMeta): - @abc.abstractmethod - def __str__(self) -> str: - """ - Returns the str representation of this Specifier like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self) -> int: - """ - Returns a hash value for this Specifier like object. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Returns a boolean representing whether or not the two Specifier like - objects are equal. - """ - - @abc.abstractproperty - def prereleases(self) -> Optional[bool]: - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @prereleases.setter - def prereleases(self, value: bool) -> None: - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. 
- """ - - @abc.abstractmethod - def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. - """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators: Dict[str, str] = {} - _regex: Pattern[str] - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - - self._spec: Tuple[str, str] = ( - match.group("operator").strip(), - match.group("version").strip(), - ) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self) -> str: - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"<{self.__class__.__name__}({str(self)!r}{pre})>" - - def __str__(self) -> str: - return "{}{}".format(*self._spec) - - @property - def _canonical_spec(self) -> Tuple[str, str]: - return self._spec[0], canonicalize_version(self._spec[1]) - - def __hash__(self) -> int: - return hash(self._canonical_spec) - - def __eq__(self, other: object) -> bool: - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self) -> str: - return self._spec[0] - - @property - def version(self) -> str: - return self._spec[1] - - @property - def prereleases(self) -> Optional[bool]: - return self._prereleases - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __contains__(self, item: str) -> bool: - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - normalized_item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. 
- operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. - for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later in case nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(==|!=|<=|>=|<|>)) - \s* - (?P - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: - super().__init__(spec, prereleases) - - warnings.warn( - "Creating a LegacyVersion has been deprecated and will be " - "removed in the next major release", - DeprecationWarning, - ) - - def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion: - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal( - self, prospective: LegacyVersion, spec: str - ) -> bool: - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool: - return prospective > self._coerce_version(spec) - - -def _require_version_compare( - fn: Callable[["Specifier", ParsedVersion, str], bool] -) -> Callable[["Specifier", ParsedVersion, str], bool]: - @functools.wraps(fn) - def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool: - if not isinstance(prospective, Version): - return False - return 
fn(self, prospective, spec) - - return wrapped - - -class Specifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(~=|==|!=|<=|>=|<|>|===)) - (?P - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. - (?: - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. - (?=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - @_require_version_compare - def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: - - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore suffix segments. - prefix = ".".join( - list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - @_require_version_compare - def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: - - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) - # Split the spec out by dots, and pretend that there is an implicit - # dot in between a release segment and a pre-release segment. 
- split_spec = _version_split(spec[:-2]) # Remove the trailing .* - - # Split the prospective version out by dots, and pretend that there - # is an implicit dot in between a release segment and a pre-release - # segment. - split_prospective = _version_split(str(prospective)) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - shortened_prospective = split_prospective[: len(split_spec)] - - # Pad out our two sides with zeros so that they both equal the same - # length. - padded_spec, padded_prospective = _pad_version( - split_spec, shortened_prospective - ) - - return padded_prospective == padded_spec - else: - # Convert our spec string into a Version - spec_version = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec_version.local: - prospective = Version(prospective.public) - - return prospective == spec_version - - @_require_version_compare - def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool: - return not self._compare_equal(prospective, spec) - - @_require_version_compare - def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal( - self, prospective: ParsedVersion, spec: str - ) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. 
- if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self) -> bool: - - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. - if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version: str) -> List[str]: - result: List[str] = [] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _is_not_suffix(segment: str) -> bool: - return not any( - segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") - ) - - -def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) - - -class SpecifierSet(BaseSpecifier): - def __init__( - self, specifiers: str = "", prereleases: Optional[bool] = None - ) -> None: - - # Split on , to break each individual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. 
- split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed: Set[_IndividualSpecifier] = set() - for specifier in split_specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - def __repr__(self) -> str: - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"" - - def __str__(self) -> str: - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self) -> int: - return hash(self._specs) - - def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": - if isinstance(other, str): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." - ) - - return specifier - - def __eq__(self, other: object) -> bool: - if isinstance(other, (str, _IndividualSpecifier)): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __len__(self) -> int: - return len(self._specs) - - def __iter__(self) -> Iterator[_IndividualSpecifier]: - return iter(self._specs) - - @property - def prereleases(self) -> Optional[bool]: - - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __contains__(self, item: UnparsedVersion) -> bool: - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. 
If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all(s.contains(item, prereleases=prereleases) for s in self._specs) - - def filter( - self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None - ) -> Iterable[VersionTypeVar]: - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. - else: - filtered: List[VersionTypeVar] = [] - found_prereleases: List[VersionTypeVar] = [] - - item: UnparsedVersion - parsed_version: Union[Version, LegacyVersion] - - for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return found_prereleases - - return filtered diff --git a/pipenv/vendor/packaging/tags.py b/pipenv/vendor/packaging/tags.py deleted file mode 100644 index 9a3d25a71c..0000000000 --- a/pipenv/vendor/packaging/tags.py +++ /dev/null @@ -1,487 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import logging -import platform -import sys -import sysconfig -from importlib.machinery import EXTENSION_SUFFIXES -from typing import ( - Dict, - FrozenSet, - Iterable, - Iterator, - List, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -from . import _manylinux, _musllinux - -logger = logging.getLogger(__name__) - -PythonVersion = Sequence[int] -MacVersion = Tuple[int, int] - -INTERPRETER_SHORT_NAMES: Dict[str, str] = { - "python": "py", # Generic. 
- "cpython": "cp", - "pypy": "pp", - "ironpython": "ip", - "jython": "jy", -} - - -_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 - - -class Tag: - """ - A representation of the tag triple for a wheel. - - Instances are considered immutable and thus are hashable. Equality checking - is also supported. - """ - - __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] - - def __init__(self, interpreter: str, abi: str, platform: str) -> None: - self._interpreter = interpreter.lower() - self._abi = abi.lower() - self._platform = platform.lower() - # The __hash__ of every single element in a Set[Tag] will be evaluated each time - # that a set calls its `.disjoint()` method, which may be called hundreds of - # times when scanning a page of links for packages with tags matching that - # Set[Tag]. Pre-computing the value here produces significant speedups for - # downstream consumers. - self._hash = hash((self._interpreter, self._abi, self._platform)) - - @property - def interpreter(self) -> str: - return self._interpreter - - @property - def abi(self) -> str: - return self._abi - - @property - def platform(self) -> str: - return self._platform - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Tag): - return NotImplemented - - return ( - (self._hash == other._hash) # Short-circuit ASAP for perf reasons. - and (self._platform == other._platform) - and (self._abi == other._abi) - and (self._interpreter == other._interpreter) - ) - - def __hash__(self) -> int: - return self._hash - - def __str__(self) -> str: - return f"{self._interpreter}-{self._abi}-{self._platform}" - - def __repr__(self) -> str: - return f"<{self} @ {id(self)}>" - - -def parse_tag(tag: str) -> FrozenSet[Tag]: - """ - Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. - - Returning a set is required due to the possibility that the tag is a - compressed tag set. - """ - tags = set() - interpreters, abis, platforms = tag.split("-") - for interpreter in interpreters.split("."): - for abi in abis.split("."): - for platform_ in platforms.split("."): - tags.add(Tag(interpreter, abi, platform_)) - return frozenset(tags) - - -def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: - value = sysconfig.get_config_var(name) - if value is None and warn: - logger.debug( - "Config variable '%s' is unset, Python ABI tag may be incorrect", name - ) - return value - - -def _normalize_string(string: str) -> str: - return string.replace(".", "_").replace("-", "_") - - -def _abi3_applies(python_version: PythonVersion) -> bool: - """ - Determine if the Python version supports abi3. - - PEP 384 was first implemented in Python 3.2. - """ - return len(python_version) > 1 and tuple(python_version) >= (3, 2) - - -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: - py_version = tuple(py_version) # To allow for version comparison. - abis = [] - version = _version_nodot(py_version[:2]) - debug = pymalloc = ucs4 = "" - with_debug = _get_config_var("Py_DEBUG", warn) - has_refcount = hasattr(sys, "gettotalrefcount") - # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled - # extension modules is the best option. 
- # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 - has_ext = "_d.pyd" in EXTENSION_SUFFIXES - if with_debug or (with_debug is None and (has_refcount or has_ext)): - debug = "d" - if py_version < (3, 8): - with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) - if with_pymalloc or with_pymalloc is None: - pymalloc = "m" - if py_version < (3, 3): - unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) - if unicode_size == 4 or ( - unicode_size is None and sys.maxunicode == 0x10FFFF - ): - ucs4 = "u" - elif debug: - # Debug builds can also load "normal" extension modules. - # We can also assume no UCS-4 or pymalloc requirement. - abis.append(f"cp{version}") - abis.insert( - 0, - "cp{version}{debug}{pymalloc}{ucs4}".format( - version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 - ), - ) - return abis - - -def cpython_tags( - python_version: Optional[PythonVersion] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a CPython interpreter. - - The tags consist of: - - cp-- - - cp-abi3- - - cp-none- - - cp-abi3- # Older Python versions down to 3.2. - - If python_version only specifies a major version then user-provided ABIs and - the 'none' ABItag will be used. - - If 'abi3' or 'none' are specified in 'abis' then they will be yielded at - their normal position and not at the beginning. - """ - if not python_version: - python_version = sys.version_info[:2] - - interpreter = f"cp{_version_nodot(python_version[:2])}" - - if abis is None: - if len(python_version) > 1: - abis = _cpython_abis(python_version, warn) - else: - abis = [] - abis = list(abis) - # 'abi3' and 'none' are explicitly handled later. - for explicit_abi in ("abi3", "none"): - try: - abis.remove(explicit_abi) - except ValueError: - pass - - platforms = list(platforms or platform_tags()) - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - if _abi3_applies(python_version): - yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) - yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) - - if _abi3_applies(python_version): - for minor_version in range(python_version[1] - 1, 1, -1): - for platform_ in platforms: - interpreter = "cp{version}".format( - version=_version_nodot((python_version[0], minor_version)) - ) - yield Tag(interpreter, "abi3", platform_) - - -def _generic_abi() -> Iterator[str]: - abi = sysconfig.get_config_var("SOABI") - if abi: - yield _normalize_string(abi) - - -def generic_tags( - interpreter: Optional[str] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a generic interpreter. - - The tags consist of: - - -- - - The "none" ABI will be added if it was not explicitly provided. - """ - if not interpreter: - interp_name = interpreter_name() - interp_version = interpreter_version(warn=warn) - interpreter = "".join([interp_name, interp_version]) - if abis is None: - abis = _generic_abi() - platforms = list(platforms or platform_tags()) - abis = list(abis) - if "none" not in abis: - abis.append("none") - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - -def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: - """ - Yields Python versions in descending order. 
- - After the latest version, the major-only version will be yielded, and then - all previous versions of that major version. - """ - if len(py_version) > 1: - yield f"py{_version_nodot(py_version[:2])}" - yield f"py{py_version[0]}" - if len(py_version) > 1: - for minor in range(py_version[1] - 1, -1, -1): - yield f"py{_version_nodot((py_version[0], minor))}" - - -def compatible_tags( - python_version: Optional[PythonVersion] = None, - interpreter: Optional[str] = None, - platforms: Optional[Iterable[str]] = None, -) -> Iterator[Tag]: - """ - Yields the sequence of tags that are compatible with a specific version of Python. - - The tags consist of: - - py*-none- - - -none-any # ... if `interpreter` is provided. - - py*-none-any - """ - if not python_version: - python_version = sys.version_info[:2] - platforms = list(platforms or platform_tags()) - for version in _py_interpreter_range(python_version): - for platform_ in platforms: - yield Tag(version, "none", platform_) - if interpreter: - yield Tag(interpreter, "none", "any") - for version in _py_interpreter_range(python_version): - yield Tag(version, "none", "any") - - -def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: - if not is_32bit: - return arch - - if arch.startswith("ppc"): - return "ppc" - - return "i386" - - -def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: - formats = [cpu_arch] - if cpu_arch == "x86_64": - if version < (10, 4): - return [] - formats.extend(["intel", "fat64", "fat32"]) - - elif cpu_arch == "i386": - if version < (10, 4): - return [] - formats.extend(["intel", "fat32", "fat"]) - - elif cpu_arch == "ppc64": - # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? - if version > (10, 5) or version < (10, 4): - return [] - formats.append("fat64") - - elif cpu_arch == "ppc": - if version > (10, 6): - return [] - formats.extend(["fat32", "fat"]) - - if cpu_arch in {"arm64", "x86_64"}: - formats.append("universal2") - - if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: - formats.append("universal") - - return formats - - -def mac_platforms( - version: Optional[MacVersion] = None, arch: Optional[str] = None -) -> Iterator[str]: - """ - Yields the platform tags for a macOS system. - - The `version` parameter is a two-item tuple specifying the macOS version to - generate platform tags for. The `arch` parameter is the CPU architecture to - generate platform tags for. Both parameters default to the appropriate value - for the current system. - """ - version_str, _, cpu_arch = platform.mac_ver() - if version is None: - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - else: - version = version - if arch is None: - arch = _mac_arch(cpu_arch) - else: - arch = arch - - if (10, 0) <= version and version < (11, 0): - # Prior to Mac OS 11, each yearly release of Mac OS bumped the - # "minor" version number. The major version was always 10. - for minor_version in range(version[1], -1, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=10, minor=minor_version, binary_format=binary_format - ) - - if version >= (11, 0): - # Starting with Mac OS 11, each yearly release bumps the major version - # number. The minor versions are now the midyear updates. 
-        for major_version in range(version[0], 10, -1):
-            compat_version = major_version, 0
-            binary_formats = _mac_binary_formats(compat_version, arch)
-            for binary_format in binary_formats:
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=major_version, minor=0, binary_format=binary_format
-                )
-
-    if version >= (11, 0):
-        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
-        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
-        # releases exist.
-        #
-        # However, the "universal2" binary format can have a
-        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
-        # that version of macOS.
-        if arch == "x86_64":
-            for minor_version in range(16, 3, -1):
-                compat_version = 10, minor_version
-                binary_formats = _mac_binary_formats(compat_version, arch)
-                for binary_format in binary_formats:
-                    yield "macosx_{major}_{minor}_{binary_format}".format(
-                        major=compat_version[0],
-                        minor=compat_version[1],
-                        binary_format=binary_format,
-                    )
-        else:
-            for minor_version in range(16, 3, -1):
-                compat_version = 10, minor_version
-                binary_format = "universal2"
-                yield "macosx_{major}_{minor}_{binary_format}".format(
-                    major=compat_version[0],
-                    minor=compat_version[1],
-                    binary_format=binary_format,
-                )
-
-
-def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
-    linux = _normalize_string(sysconfig.get_platform())
-    if is_32bit:
-        if linux == "linux_x86_64":
-            linux = "linux_i686"
-        elif linux == "linux_aarch64":
-            linux = "linux_armv7l"
-    _, arch = linux.split("_", 1)
-    yield from _manylinux.platform_tags(linux, arch)
-    yield from _musllinux.platform_tags(arch)
-    yield linux
-
-
-def _generic_platforms() -> Iterator[str]:
-    yield _normalize_string(sysconfig.get_platform())
-
-
-def platform_tags() -> Iterator[str]:
-    """
-    Provides the platform tags for this installation.
-    """
-    if platform.system() == "Darwin":
-        return mac_platforms()
-    elif platform.system() == "Linux":
-        return _linux_platforms()
-    else:
-        return _generic_platforms()
-
-
-def interpreter_name() -> str:
-    """
-    Returns the name of the running interpreter.
-    """
-    name = sys.implementation.name
-    return INTERPRETER_SHORT_NAMES.get(name) or name
-
-
-def interpreter_version(*, warn: bool = False) -> str:
-    """
-    Returns the version of the running interpreter.
-    """
-    version = _get_config_var("py_version_nodot", warn=warn)
-    if version:
-        version = str(version)
-    else:
-        version = _version_nodot(sys.version_info[:2])
-    return version
-
-
-def _version_nodot(version: PythonVersion) -> str:
-    return "".join(map(str, version))
-
-
-def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
-    """
-    Returns the sequence of tag triples for the running interpreter.
-
-    The order of the sequence corresponds to priority order for the
-    interpreter, from most to least important.
-    """
-
-    interp_name = interpreter_name()
-    if interp_name == "cp":
-        yield from cpython_tags(warn=warn)
-    else:
-        yield from generic_tags()
-
-    if interp_name == "pp":
-        yield from compatible_tags(interpreter="pp3")
-    else:
-        yield from compatible_tags()
diff --git a/pipenv/vendor/packaging/utils.py b/pipenv/vendor/packaging/utils.py
deleted file mode 100644
index bab11b80c6..0000000000
--- a/pipenv/vendor/packaging/utils.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-import re
-from typing import FrozenSet, NewType, Tuple, Union, cast
-
-from .tags import Tag, parse_tag
-from .version import InvalidVersion, Version
-
-BuildTag = Union[Tuple[()], Tuple[int, str]]
-NormalizedName = NewType("NormalizedName", str)
-
-
-class InvalidWheelFilename(ValueError):
-    """
-    An invalid wheel filename was found, users should refer to PEP 427.
-    """
-
-
-class InvalidSdistFilename(ValueError):
-    """
-    An invalid sdist filename was found, users should refer to the packaging user guide.
-    """
-
-
-_canonicalize_regex = re.compile(r"[-_.]+")
-# PEP 427: The build number must start with a digit.
-_build_tag_regex = re.compile(r"(\d+)(.*)")
-
-
-def canonicalize_name(name: str) -> NormalizedName:
-    # This is taken from PEP 503.
-    value = _canonicalize_regex.sub("-", name).lower()
-    return cast(NormalizedName, value)
-
-
-def canonicalize_version(version: Union[Version, str]) -> str:
-    """
-    This is very similar to Version.__str__, but has one subtle difference
-    with the way it handles the release segment.
-    """
-    if isinstance(version, str):
-        try:
-            parsed = Version(version)
-        except InvalidVersion:
-            # Legacy versions cannot be normalized
-            return version
-    else:
-        parsed = version
-
-    parts = []
-
-    # Epoch
-    if parsed.epoch != 0:
-        parts.append(f"{parsed.epoch}!")
-
-    # Release segment
-    # NB: This strips trailing '.0's to normalize
-    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release)))
-
-    # Pre-release
-    if parsed.pre is not None:
-        parts.append("".join(str(x) for x in parsed.pre))
-
-    # Post-release
-    if parsed.post is not None:
-        parts.append(f".post{parsed.post}")
-
-    # Development release
-    if parsed.dev is not None:
-        parts.append(f".dev{parsed.dev}")
-
-    # Local version segment
-    if parsed.local is not None:
-        parts.append(f"+{parsed.local}")
-
-    return "".join(parts)
-
-
-def parse_wheel_filename(
-    filename: str,
-) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
-    if not filename.endswith(".whl"):
-        raise InvalidWheelFilename(
-            f"Invalid wheel filename (extension must be '.whl'): {filename}"
-        )
-
-    filename = filename[:-4]
-    dashes = filename.count("-")
-    if dashes not in (4, 5):
-        raise InvalidWheelFilename(
-            f"Invalid wheel filename (wrong number of parts): {filename}"
-        )
-
-    parts = filename.split("-", dashes - 2)
-    name_part = parts[0]
-    # See PEP 427 for the rules on escaping the project name
-    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
-        raise InvalidWheelFilename(f"Invalid project name: {filename}")
-    name = canonicalize_name(name_part)
-    version = Version(parts[1])
-    if dashes == 5:
-        build_part = parts[2]
-        build_match = _build_tag_regex.match(build_part)
-        if build_match is None:
-            raise InvalidWheelFilename(
-                f"Invalid build number: {build_part} in '{filename}'"
-            )
-        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
-    else:
-        build = ()
-    tags = parse_tag(parts[-1])
-    return (name, version, build, tags)
-
-
-def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
-    if filename.endswith(".tar.gz"):
-        file_stem = filename[: -len(".tar.gz")]
-    elif filename.endswith(".zip"):
-        file_stem = filename[: -len(".zip")]
-    else:
-        raise InvalidSdistFilename(
-            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
-            f" {filename}"
-        )
-
-    # We are requiring a PEP 440 version, which cannot contain dashes,
-    # so we split on the last dash.
-    name_part, sep, version_part = file_stem.rpartition("-")
-    if not sep:
-        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
-
-    name = canonicalize_name(name_part)
-    version = Version(version_part)
-    return (name, version)
diff --git a/pipenv/vendor/packaging/version.py b/pipenv/vendor/packaging/version.py
deleted file mode 100644
index de9a09a4ed..0000000000
--- a/pipenv/vendor/packaging/version.py
+++ /dev/null
@@ -1,504 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-
-import collections
-import itertools
-import re
-import warnings
-from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
-
-from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
-
-__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
-
-InfiniteTypes = Union[InfinityType, NegativeInfinityType]
-PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
-SubLocalType = Union[InfiniteTypes, int, str]
-LocalType = Union[
-    NegativeInfinityType,
-    Tuple[
-        Union[
-            SubLocalType,
-            Tuple[SubLocalType, str],
-            Tuple[NegativeInfinityType, SubLocalType],
-        ],
-        ...,
-    ],
-]
-CmpKey = Tuple[
-    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
-]
-LegacyCmpKey = Tuple[int, Tuple[str, ...]]
-VersionComparisonMethod = Callable[
-    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
-]
-
-_Version = collections.namedtuple(
-    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
-)
-
-
-def parse(version: str) -> Union["LegacyVersion", "Version"]:
-    """
-    Parse the given version string and return either a :class:`Version` object
-    or a :class:`LegacyVersion` object depending on if the given version is
-    a valid PEP 440 version or a legacy version.
-    """
-    try:
-        return Version(version)
-    except InvalidVersion:
-        return LegacyVersion(version)
-
-
-class InvalidVersion(ValueError):
-    """
-    An invalid version was found, users should refer to PEP 440.
-    """
-
-
-class _BaseVersion:
-    _key: Union[CmpKey, LegacyCmpKey]
-
-    def __hash__(self) -> int:
-        return hash(self._key)
-
-    # Please keep the duplicated `isinstance` check
-    # in the six comparisons hereunder
-    # unless you find a way to avoid adding overhead function calls.
-    def __lt__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key < other._key
-
-    def __le__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key <= other._key
-
-    def __eq__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key == other._key
-
-    def __ge__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key >= other._key
-
-    def __gt__(self, other: "_BaseVersion") -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key > other._key
-
-    def __ne__(self, other: object) -> bool:
-        if not isinstance(other, _BaseVersion):
-            return NotImplemented
-
-        return self._key != other._key
-
-
-class LegacyVersion(_BaseVersion):
-    def __init__(self, version: str) -> None:
-        self._version = str(version)
-        self._key = _legacy_cmpkey(self._version)
-
-        warnings.warn(
-            "Creating a LegacyVersion has been deprecated and will be "
-            "removed in the next major release",
-            DeprecationWarning,
-        )
-
-    def __str__(self) -> str:
-        return self._version
-
-    def __repr__(self) -> str:
-        return f"<LegacyVersion('{self}')>"
-
-    @property
-    def public(self) -> str:
-        return self._version
-
-    @property
-    def base_version(self) -> str:
-        return self._version
-
-    @property
-    def epoch(self) -> int:
-        return -1
-
-    @property
-    def release(self) -> None:
-        return None
-
-    @property
-    def pre(self) -> None:
-        return None
-
-    @property
-    def post(self) -> None:
-        return None
-
-    @property
-    def dev(self) -> None:
-        return None
-
-    @property
-    def local(self) -> None:
-        return None
-
-    @property
-    def is_prerelease(self) -> bool:
-        return False
-
-    @property
-    def is_postrelease(self) -> bool:
-        return False
-
-    @property
-    def is_devrelease(self) -> bool:
-        return False
-
-
-_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
-
-_legacy_version_replacement_map = {
-    "pre": "c",
-    "preview": "c",
-    "-": "final-",
-    "rc": "c",
-    "dev": "@",
-}
-
-
-def _parse_version_parts(s: str) -> Iterator[str]:
-    for part in _legacy_version_component_re.split(s):
-        part = _legacy_version_replacement_map.get(part, part)
-
-        if not part or part == ".":
-            continue
-
-        if part[:1] in "0123456789":
-            # pad for numeric comparison
-            yield part.zfill(8)
-        else:
-            yield "*" + part
-
-    # ensure that alpha/beta/candidate are before final
-    yield "*final"
-
-
-def _legacy_cmpkey(version: str) -> LegacyCmpKey:
-
-    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
-    # greater than or equal to 0. This will effectively put the LegacyVersion,
-    # which uses the defacto standard originally implemented by setuptools,
-    # as before all PEP 440 versions.
-    epoch = -1
-
-    # This scheme is taken from pkg_resources.parse_version setuptools prior to
-    # it's adoption of the packaging library.
-    parts: List[str] = []
-    for part in _parse_version_parts(version.lower()):
-        if part.startswith("*"):
-            # remove "-" before a prerelease tag
-            if part < "*final":
-                while parts and parts[-1] == "*final-":
-                    parts.pop()
-
-            # remove trailing zeros from each series of numeric parts
-            while parts and parts[-1] == "00000000":
-                parts.pop()
-
-        parts.append(part)
-
-    return epoch, tuple(parts)
-
-
-# Deliberately not anchored to the start and end of the string, to make it
-# easier for 3rd party code to reuse
-VERSION_PATTERN = r"""
-    v?
-    (?:
-        (?:(?P<epoch>[0-9]+)!)?                           # epoch
-        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
-        (?P<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-
-class Version(_BaseVersion):
-
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    def __init__(self, version: str) -> None:
-
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion(f"Invalid version: '{version}'")
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self) -> str:
-        return f""
-
-    def __str__(self) -> str:
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(f".post{self.post}")
-
-        # Development release
-        if self.dev is not None:
-            parts.append(f".dev{self.dev}")
-
-        # Local version segment
-        if self.local is not None:
-            parts.append(f"+{self.local}")
-
-        return "".join(parts)
-
-    @property
-    def epoch(self) -> int:
-        _epoch: int = self._version.epoch
-        return _epoch
-
-    @property
-    def release(self) -> Tuple[int, ...]:
-        _release: Tuple[int, ...] = self._version.release
-        return _release
-
-    @property
-    def pre(self) -> Optional[Tuple[str, int]]:
-        _pre: Optional[Tuple[str, int]] = self._version.pre
-        return _pre
-
-    @property
-    def post(self) -> Optional[int]:
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self) -> Optional[int]:
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self) -> Optional[str]:
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self) -> str:
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self) -> str:
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self) -> bool:
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self) -> bool:
-        return self.post is not None
-
-    @property
-    def is_devrelease(self) -> bool:
-        return self.dev is not None
-
-    @property
-    def major(self) -> int:
-        return self.release[0] if len(self.release) >= 1 else 0
-
-    @property
-    def minor(self) -> int:
-        return self.release[1] if len(self.release) >= 2 else 0
-
-    @property
-    def micro(self) -> int:
-        return self.release[2] if len(self.release) >= 3 else 0
-
-
-def _parse_letter_version(
-    letter: str, number: Union[str, bytes, SupportsInt]
-) -> Optional[Tuple[str, int]]:
-
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-    return None
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local: str) -> Optional[LocalType]:
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-    return None
-
-
-def _cmpkey(
-    epoch: int,
-    release: Tuple[int, ...],
-    pre: Optional[Tuple[str, int]],
-    post: Optional[Tuple[str, int]],
-    dev: Optional[Tuple[str, int]],
-    local: Optional[Tuple[SubLocalType]],
-) -> CmpKey:
-
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll use a reverse the list, drop all the now
-    # leading zeros until we come to something non zero, then take the rest
-    # re-reverse it back into the correct order and make it a tuple and use
-    # that for our sorting key.
-    _release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        _pre: PrePostDevType = NegativeInfinity
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        _pre = Infinity
-    else:
-        _pre = pre
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        _post: PrePostDevType = NegativeInfinity
-
-    else:
-        _post = post
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        _dev: PrePostDevType = Infinity
-
-    else:
-        _dev = dev
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        _local: LocalType = NegativeInfinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        _local = tuple(
-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
-        )
-
-    return epoch, _release, _pre, _post, _dev, _local
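Removing this copy is safe because pip 22.1.2's vendored packaging exposes the same API. As a quick sanity check of the sort-key behaviour `_cmpkey` documents above (a sketch, assuming packaging 21.3 semantics, the release being dropped from vendor.txt below):

```python
from packaging.version import parse

# dev sorts before pre-release, which sorts before the final release;
# a local tag sorts after its public version but before a post release.
versions = ["1.0.post1", "1.0a1", "1.0.dev0", "1.0", "1.0+local"]
print(sorted(versions, key=parse))
# ['1.0.dev0', '1.0a1', '1.0', '1.0+local', '1.0.post1']
```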
diff --git a/pipenv/vendor/pip_shims/__init__.py b/pipenv/vendor/pip_shims/__init__.py
index 7575fc8823..3d78196d59 100644
--- a/pipenv/vendor/pip_shims/__init__.py
+++ b/pipenv/vendor/pip_shims/__init__.py
@@ -25,7 +25,7 @@
 
 from . import shims
 
-__version__ = "0.7.2"
+__version__ = "0.7.3"
 
 
 if "pip_shims" in sys.modules:
diff --git a/pipenv/vendor/pip_shims/compat.py b/pipenv/vendor/pip_shims/compat.py
index 34722d719d..a8c23fd957 100644
--- a/pipenv/vendor/pip_shims/compat.py
+++ b/pipenv/vendor/pip_shims/compat.py
@@ -11,7 +11,7 @@
 import sys
 import types
 
-from pipenv.vendor.packaging import specifiers
+from pipenv.patched.notpip._vendor.packaging import specifiers
 
 from .environment import MYPY_RUNNING
 from .utils import (
@@ -48,7 +48,7 @@
         Union,
     )
 
-    from pipenv.patched.notpip._vendor.requests import Session
+    from requests import Session
 
     from .utils import TShim, TShimmedFunc, TShimmedPath
 
@@ -380,7 +380,7 @@ def partial_command(shimmed_path, cmd_mapping=None):
     """
     Maps a default set of arguments across all members of a
     :class:`~pip_shims.models.ShimmedPath` instance, specifically for
-    :class:`~pipenv.patched.notpip._internal.command.Command` instances which need
+    :class:`~pip._internal.command.Command` instances which need
     `summary` and `name` arguments.
 
     :param :class:`~pip_shims.models.ShimmedPath` shimmed_path:  A
@@ -506,7 +506,7 @@ def get_requirement_set(
 
     :param :class:`~pip_shims.models.ShimmedPathCollection` wheel_cache_provider: A
         context manager provider which resolves to a `WheelCache` instance
-    :param install_command: A :class:`~pipenv.patched.notpip._internal.commands.install.InstallCommand`
+    :param install_command: A :class:`~pip._internal.commands.install.InstallCommand`
         instance which is used to generate the finder.
     :param :class:`~pip_shims.models.ShimmedPathCollection` req_set_provider: A provider
         to build requirement set instances.
@@ -543,7 +543,7 @@ def get_requirement_set(
     :param install_cmd_provider: A shim for providing new install command instances.
     :type install_cmd_provider: :class:`~pip_shims.models.ShimmedPathCollection`
     :return: A new requirement set instance
-    :rtype: :class:`~pipenv.patched.notpip._internal.req.req_set.RequirementSet`
+    :rtype: :class:`~pip._internal.req.req_set.RequirementSet`
     """
     wheel_cache_provider = resolve_possible_shim(wheel_cache_provider)
     req_set_provider = resolve_possible_shim(req_set_provider)
@@ -598,13 +598,13 @@ def get_package_finder(
     # type: (...) -> TFinder
     """Shim for compatibility to generate package finders.
 
-    Build and return a :class:`~pipenv.patched.notpip._internal.index.package_finder.PackageFinder`
-    instance using the :class:`~pipenv.patched.notpip._internal.commands.install.InstallCommand` helper
+    Build and return a :class:`~pip._internal.index.package_finder.PackageFinder`
+    instance using the :class:`~pip._internal.commands.install.InstallCommand` helper
     method to construct the finder, shimmed with backports as needed for compatibility.
 
     :param install_cmd_provider: A shim for providing new install command instances.
     :type install_cmd_provider: :class:`~pip_shims.models.ShimmedPathCollection`
-    :param install_cmd: A :class:`~pipenv.patched.notpip._internal.commands.install.InstallCommand`
+    :param install_cmd: A :class:`~pip._internal.commands.install.InstallCommand`
         instance which is used to generate the finder.
     :param optparse.Values options: An optional :class:`optparse.Values` instance
         generated by calling `install_cmd.parser.parse_args()` typically.
@@ -615,15 +615,15 @@ def get_package_finder(
     :param Optional[str] abi: The target abi to support, e.g. "cp38"
     :param Optional[str] implementation: An optional implementation string for limiting
         searches to a specific implementation, e.g. "cp" or "py"
-    :param target_python: A :class:`~pipenv.patched.notpip._internal.models.target_python.TargetPython`
+    :param target_python: A :class:`~pip._internal.models.target_python.TargetPython`
         instance (will be translated to alternate arguments if necessary on incompatible
         pip versions).
     :param Optional[bool] ignore_requires_python: Whether to ignore `requires_python`
         on resulting candidates, only valid after pip version 19.3.1
     :param target_python_builder: A 'TargetPython' builder (e.g. the class itself,
         uninstantiated)
-    :return: A :class:`pipenv.patched.notpip._internal.index.package_finder.PackageFinder` instance
-    :rtype: :class:`pipenv.patched.notpip._internal.index.package_finder.PackageFinder`
+    :return: A :class:`pip._internal.index.package_finder.PackageFinder` instance
+    :rtype: :class:`pip._internal.index.package_finder.PackageFinder`
 
     :Example:
 
@@ -719,7 +719,7 @@ def shim_unpack(
     # (...) -> None
     """
     Accepts all parameters that have been valid to pass
-    to :func:`pipenv.patched.notpip._internal.download.unpack_url` and selects or
+    to :func:`pip._internal.download.unpack_url` and selects or
     drops parameters as needed before invoking the provided
     callable.
 
@@ -728,9 +728,9 @@ def shim_unpack(
     :param str download_dir: The directory to download the file to
     :param TShimmedFunc tempdir_manager_provider: A callable or shim referring to
         `global_tempdir_manager` function from pip or a shimmed no-op context manager
-    :param Optional[:class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement`] ireq:
+    :param Optional[:class:`~pip._internal.req.req_install.InstallRequirement`] ireq:
         an Install Requirement instance, defaults to None
-    :param Optional[:class:`~pipenv.patched.notpip._internal.models.link.Link`] link: A Link instance,
+    :param Optional[:class:`~pip._internal.models.link.Link`] link: A Link instance,
         defaults to None.
     :param Optional[str] location: A location or source directory if the target is
         a VCS url, defaults to None.
@@ -872,7 +872,7 @@ def make_preparer(
         the finder, session, and options if needed, defaults to None
     :param Optional[TShimmedFunc] finder_provider: A package finder provider
     :yield: A new requirement preparer instance
-    :rtype: ContextManager[:class:`~pipenv.patched.notpip._internal.operations.prepare.RequirementPreparer`]
+    :rtype: ContextManager[:class:`~pip._internal.operations.prepare.RequirementPreparer`]
 
     :Example:
 
@@ -889,7 +889,7 @@ def make_preparer(
     ...     options=pip_options, finder=finder, session=session, install_cmd=ic
     ... ) as preparer:
     ...     print(preparer)
-    <pipenv.patched.notpip._internal.operations.prepare.RequirementPreparer object at 0x...>
+    <pip._internal.operations.prepare.RequirementPreparer object at 0x...>
     """
     preparer_fn = resolve_possible_shim(preparer_fn)
     downloader_provider = resolve_possible_shim(downloader_provider)
@@ -1068,7 +1068,7 @@ def get_resolver(
         the finder, session, and options if needed, defaults to None.
     :param bool use_pep517: Whether to use the pep517 build process.
     :return: A new resolver instance.
-    :rtype: :class:`~pipenv.patched.notpip._internal.legacy_resolve.Resolver`
+    :rtype: :class:`~pip._internal.legacy_resolve.Resolver`
 
     :Example:
 
@@ -1217,7 +1217,7 @@ def resolve(  # noqa:C901
 
     Maps a dictionary of names to corresponding ``InstallRequirement`` values.
 
-    :param :class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement` ireq: An
+    :param :class:`~pip._internal.req.req_install.InstallRequirement` ireq: An
         InstallRequirement to initiate the resolution process
     :param :class:`~pip_shims.models.ShimmedPathCollection` reqset_provider: A provider
         to build requirement set instances.
@@ -1241,7 +1241,7 @@ def resolve(  # noqa:C901
     :param Optional[Values] options: Pip options to use if needed, defaults to None
     :param Optional[TSession] session: Existing session to use for getting requirements,
         defaults to None
-    :param :class:`~pipenv.patched.notpip._internal.legacy_resolve.Resolver` resolver: A pre-existing
+    :param :class:`~pip._internal.legacy_resolve.Resolver` resolver: A pre-existing
         resolver instance to use for resolution
     :param Optional[TFinder] finder: The package finder to use during resolution,
         defaults to None.
@@ -1274,8 +1274,8 @@ def resolve(  # noqa:C901
     :param bool check_supported_wheels: Whether to check support of wheels before including
         them in resolution.
     :return: A dictionary mapping requirements to corresponding
-        :class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement`s
-    :rtype: :class:`~pipenv.patched.notpip._internal.req.req_install.InstallRequirement`
+        :class:`~pip._internal.req.req_install.InstallRequirement`s
+    :rtype: :class:`~pip._internal.req.req_install.InstallRequirement`
 
     :Example:
 
diff --git a/pipenv/vendor/pip_shims/models.py b/pipenv/vendor/pip_shims/models.py
index 049e385ce2..d300d2c5d2 100644
--- a/pipenv/vendor/pip_shims/models.py
+++ b/pipenv/vendor/pip_shims/models.py
@@ -36,7 +36,7 @@
 )
 
 if MYPY_RUNNING:
-    import packaging.version
+    from pipenv.patched.notpip._vendor.packaging.version import _BaseVersion
 
     Module = types.ModuleType
     from typing import (  # noqa:F811
@@ -76,7 +76,7 @@ def __init__(
         version,
         round_prereleases_up=True,
         base_import_path=None,
-        vendor_import_path="pipenv.patched.notpip._vendor",
+        vendor_import_path="pip._vendor",
     ):
         # type: (str, bool, Optional[str], str) -> None
         self.version = version
@@ -115,7 +115,7 @@ def __getitem__(self, item):
         return self.version_tuple[item]
 
     def _parse(self):
-        # type: () -> packaging.version._BaseVersion
+        # type: () -> _BaseVersion
         return parse_version(self.version)
 
     def __hash__(self):
diff --git a/pipenv/vendor/pip_shims/shims.py b/pipenv/vendor/pip_shims/shims.py
index 48f6c4324c..cfd063fd80 100644
--- a/pipenv/vendor/pip_shims/shims.py
+++ b/pipenv/vendor/pip_shims/shims.py
@@ -7,7 +7,7 @@
 import sys
 import types
 
-from pipenv.vendor.packaging.version import parse as parse_version
+from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version
 
 from .models import (
     ShimmedPathCollection,
diff --git a/pipenv/vendor/pip_shims/utils.py b/pipenv/vendor/pip_shims/utils.py
index 2a31a4dfe6..1aef85a3d4 100644
--- a/pipenv/vendor/pip_shims/utils.py
+++ b/pipenv/vendor/pip_shims/utils.py
@@ -11,7 +11,7 @@
 from collections.abc import Callable
 from functools import wraps
 
-import packaging.version
+from pipenv.patched.notpip._vendor.packaging.version import _BaseVersion, parse
 
 from .environment import MYPY_RUNNING
 
@@ -107,10 +107,10 @@ def _parse(version):
 
 @memoize
 def parse_version(version):
-    # type: (str) -> packaging.version._BaseVersion
+    # type: (str) -> _BaseVersion
     if not isinstance(version, STRING_TYPES):
         raise TypeError("Can only derive versions from string, got {!r}".format(version))
-    return packaging.version.parse(version)
+    return parse(version)
 
 
 @memoize
@@ -387,7 +387,7 @@ def get_allowed_args(fn_or_class):
     try:
         signature = inspect.signature(fn_or_class)
     except AttributeError:
-        import pipenv.vendor.funcsigs as funcsigs
+        import funcsigs
 
         signature = funcsigs.signature(fn_or_class)
     args = []
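`parse_version` above now delegates to the vendored `parse` while keeping its memoization and string-only contract. A short usage sketch:

```python
from pipenv.patched.notpip._vendor.packaging.version import parse

v = parse("21.3")
print(v.release)            # (21, 3)
print(v < parse("22.1.2"))  # True -- numeric, PEP 440-aware comparison
```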
diff --git a/pipenv/vendor/pythonfinder/models/python.py b/pipenv/vendor/pythonfinder/models/python.py
index 3ab900eeb9..c4a1b09ca4 100644
--- a/pipenv/vendor/pythonfinder/models/python.py
+++ b/pipenv/vendor/pythonfinder/models/python.py
@@ -10,7 +10,7 @@
 
 import pipenv.vendor.attr as attr
 import pipenv.vendor.six as six
-from pipenv.vendor.packaging.version import Version
+from pipenv.patched.notpip._vendor.packaging.version import Version
 
 from ..compat import Path, lru_cache
 from ..environment import ASDF_DATA_DIR, MYPY_RUNNING, PYENV_ROOT, SYSTEM_ARCH
diff --git a/pipenv/vendor/pythonfinder/utils.py b/pipenv/vendor/pythonfinder/utils.py
index a0eeda1a36..58f136fb5e 100644
--- a/pipenv/vendor/pythonfinder/utils.py
+++ b/pipenv/vendor/pythonfinder/utils.py
@@ -12,7 +12,7 @@
 
 import pipenv.vendor.attr as attr
 import pipenv.vendor.six as six
-from pipenv.vendor.packaging.version import LegacyVersion, Version
+from pipenv.patched.notpip._vendor.packaging.version import LegacyVersion, Version
 
 from .compat import Path, TimeoutError, lru_cache  # noqa
 from .environment import MYPY_RUNNING, PYENV_ROOT, SUBPROCESS_TIMEOUT
@@ -124,7 +124,7 @@ def get_python_version(path):
 @lru_cache(maxsize=1024)
 def parse_python_version(version_str):
     # type: (str) -> Dict[str, Union[str, int, Version]]
-    from pipenv.vendor.packaging.version import parse as parse_version
+    from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version
 
     is_debug = False
     if version_str.endswith("-debug"):
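For context, the surrounding code strips a pyenv-style `-debug` suffix before handing the string to the now-vendored `parse`, which would otherwise reject it. A hedged sketch of that handling (`_strip_debug` is an illustrative helper, not a pythonfinder function):

```python
from pipenv.patched.notpip._vendor.packaging.version import parse

def _strip_debug(version_str):
    # Debug builds report versions like "3.9.1-debug", which packaging
    # cannot parse, so the suffix is removed before parsing.
    if version_str.endswith("-debug"):
        return version_str[: -len("-debug")], True
    return version_str, False

base, is_debug = _strip_debug("3.9.1-debug")
print(parse(base), is_debug)  # 3.9.1 True
```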
diff --git a/pipenv/vendor/requirementslib/__init__.py b/pipenv/vendor/requirementslib/__init__.py
index 0563a4e4ae..aad93c1839 100644
--- a/pipenv/vendor/requirementslib/__init__.py
+++ b/pipenv/vendor/requirementslib/__init__.py
@@ -10,7 +10,7 @@
 from .models.pipfile import Pipfile
 from .models.requirements import Requirement
 
-__version__ = "1.6.6"
+__version__ = "1.6.7"
 
 
 logger = logging.getLogger(__name__)
diff --git a/pipenv/vendor/requirementslib/models/cache.py b/pipenv/vendor/requirementslib/models/cache.py
index 27ec92a565..e4e7e05a19 100644
--- a/pipenv/vendor/requirementslib/models/cache.py
+++ b/pipenv/vendor/requirementslib/models/cache.py
@@ -10,7 +10,7 @@
 import sys
 
 import pipenv.vendor.vistir as vistir
-from pipenv.vendor.packaging.requirements import Requirement
+from pipenv.patched.notpip._vendor.packaging.requirements import Requirement
 from pipenv.vendor.pip_shims.shims import FAVORITE_HASH, SafeFileCache
 from pipenv.vendor.platformdirs import user_cache_dir
 
diff --git a/pipenv/vendor/requirementslib/models/dependencies.py b/pipenv/vendor/requirementslib/models/dependencies.py
index e98a3a1752..d90364b73b 100644
--- a/pipenv/vendor/requirementslib/models/dependencies.py
+++ b/pipenv/vendor/requirementslib/models/dependencies.py
@@ -9,11 +9,11 @@
 from json import JSONDecodeError
 
 import pipenv.vendor.attr as attr
-import packaging.markers
-import packaging.version
 import pip_shims.shims
 import pipenv.patched.notpip._vendor.requests as requests
-from pipenv.vendor.packaging.utils import canonicalize_name
+from pipenv.patched.notpip._vendor.packaging.markers import Marker
+from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
+from pipenv.patched.notpip._vendor.packaging.version import parse
 from pipenv.vendor.vistir.compat import fs_str
 from pipenv.vendor.vistir.contextmanagers import cd, temp_environ
 from pipenv.vendor.vistir.path import create_tracked_tempdir
@@ -48,8 +48,7 @@
         Union,
     )
 
-    from pipenv.vendor.packaging.markers import Marker
-    from pipenv.vendor.packaging.requirements import Requirement as PackagingRequirement
+    from pipenv.patched.notpip._vendor.packaging.requirements import Requirement as PackagingRequirement
     from pipenv.vendor.pip_shims.shims import (
         Command,
         InstallationCandidate,
@@ -137,7 +136,7 @@ def version_set(self):
 
         if len(self.candidates) == 1:
             return set()
-        return set(packaging.version.parse(version_from_ireq(c)) for c in self.candidates)
+        return set(parse(version_from_ireq(c)) for c in self.candidates)
 
     def compatible_versions(self, other):
         """Find compatible version numbers between this abstract dependency and
@@ -178,9 +177,7 @@ def compatible_abstract_dep(self, other):
             markers.add(other.markers)
         new_markers = None
         if markers:
-            new_markers = packaging.markers.Marker(
-                " or ".join(str(m) for m in sorted(markers))
-            )
+            new_markers = Marker(" or ".join(str(m) for m in sorted(markers)))
         new_ireq = copy.deepcopy(self.requirement.ireq)
         new_ireq.req.specifier = new_specifiers
         new_ireq.req.marker = new_markers
@@ -191,7 +188,7 @@ def compatible_abstract_dep(self, other):
         candidates = [
             c
             for c in self.candidates
-            if packaging.version.parse(version_from_ireq(c)) in compatible_versions
+            if parse(version_from_ireq(c)) in compatible_versions
         ]
         dep_dict = {}
         candidate_strings = [format_requirement(c) for c in candidates]
@@ -261,7 +258,7 @@ def from_requirement(cls, requirement, parent=None):
                 candidates.append(req)
                 candidates = sorted(
                     set(candidates),
-                    key=lambda k: packaging.version.parse(version_from_ireq(k)),
+                    key=lambda k: parse(version_from_ireq(k)),
                 )
         else:
             candidates = [requirement.ireq]
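The switch from `packaging.version.parse` to the vendored `parse` keeps candidate ordering numeric rather than lexicographic, which is what the `sorted(..., key=...)` call above relies on:

```python
from pipenv.patched.notpip._vendor.packaging.version import parse

candidate_versions = ["1.10.0", "1.2.0", "1.10.0", "1.9.1"]
print(sorted(set(candidate_versions), key=parse))
# ['1.2.0', '1.9.1', '1.10.0'] -- as plain strings, '1.10.0' sorts before '1.2.0'
```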
diff --git a/pipenv/vendor/requirementslib/models/markers.py b/pipenv/vendor/requirementslib/models/markers.py
index 08d0452718..d10979192a 100644
--- a/pipenv/vendor/requirementslib/models/markers.py
+++ b/pipenv/vendor/requirementslib/models/markers.py
@@ -7,9 +7,9 @@
 
 import pipenv.vendor.attr as attr
 import distlib.markers
-import packaging.version
-from pipenv.vendor.packaging.markers import InvalidMarker, Marker
-from pipenv.vendor.packaging.specifiers import LegacySpecifier, Specifier, SpecifierSet
+from pipenv.patched.notpip._vendor.packaging.markers import InvalidMarker, Marker
+from pipenv.patched.notpip._vendor.packaging.specifiers import LegacySpecifier, Specifier, SpecifierSet
+from pipenv.patched.notpip._vendor.packaging.version import parse
 from pipenv.vendor.vistir.misc import dedup
 
 from ..environment import MYPY_RUNNING
@@ -343,7 +343,7 @@ def get_versions(specset, group_by_operator=True):
         for grp, keys in itertools.groupby(version_tuples, key=initial_grouping_key)
     ]
     versions = [
-        (op, packaging.version.parse(".".join(str(v) for v in val)))
+        (op, parse(".".join(str(v) for v in val)))
         for op, vals in op_groups
         for val in vals
     ]
diff --git a/pipenv/vendor/requirementslib/models/metadata.py b/pipenv/vendor/requirementslib/models/metadata.py
index c3c366dabb..f725c71e39 100644
--- a/pipenv/vendor/requirementslib/models/metadata.py
+++ b/pipenv/vendor/requirementslib/models/metadata.py
@@ -15,13 +15,13 @@
 import dateutil.parser
 import distlib.metadata
 import distlib.wheel
-import packaging.version
 import pipenv.patched.notpip._vendor.requests as requests
 import pipenv.vendor.vistir as vistir
-from pipenv.vendor.packaging.markers import Marker
-from pipenv.vendor.packaging.requirements import Requirement as PackagingRequirement
-from pipenv.vendor.packaging.specifiers import Specifier, SpecifierSet
-from pipenv.vendor.packaging.tags import Tag
+from pipenv.patched.notpip._vendor.packaging.markers import Marker
+from pipenv.patched.notpip._vendor.packaging.requirements import Requirement as PackagingRequirement
+from pipenv.patched.notpip._vendor.packaging.specifiers import Specifier, SpecifierSet
+from pipenv.patched.notpip._vendor.packaging.tags import Tag
+from pipenv.patched.notpip._vendor.packaging.version import _BaseVersion, parse
 
 from ..environment import MYPY_RUNNING
 from .markers import (
@@ -776,8 +776,8 @@ def yanked(self):
 
     @property
     def parsed_version(self):
-        # type: () -> packaging.version._BaseVersion
-        return packaging.version.parse(self.version)
+        # type: () -> _BaseVersion
+        return parse(self.version)
 
     @property
     def wheels(self):
diff --git a/pipenv/vendor/requirementslib/models/project.py b/pipenv/vendor/requirementslib/models/project.py
index 9211cd31fc..4b883896ae 100644
--- a/pipenv/vendor/requirementslib/models/project.py
+++ b/pipenv/vendor/requirementslib/models/project.py
@@ -7,11 +7,11 @@
 import os
 
 import pipenv.vendor.attr as attr
-import packaging.markers
-import packaging.utils
 import pipenv.vendor.plette as plette
 import plette.models
 import pipenv.vendor.tomlkit as tomlkit
+from pipenv.patched.notpip._vendor.packaging.markers import Marker
+from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
 
 SectionDifference = collections.namedtuple("SectionDifference", ["inthis", "inthat"])
 FileDifference = collections.namedtuple("FileDifference", ["default", "develop"])
@@ -23,11 +23,11 @@ def _are_pipfile_entries_equal(a, b):
     if a != b:
         return False
     try:
-        marker_eval_a = packaging.markers.Marker(a["markers"]).evaluate()
+        marker_eval_a = Marker(a["markers"]).evaluate()
     except (AttributeError, KeyError, TypeError, ValueError):
         marker_eval_a = True
     try:
-        marker_eval_b = packaging.markers.Marker(b["markers"]).evaluate()
+        marker_eval_b = Marker(b["markers"]).evaluate()
     except (AttributeError, KeyError, TypeError, ValueError):
         marker_eval_b = True
     return marker_eval_a == marker_eval_b
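A quick illustration of the `Marker(...).evaluate()` comparison above: two pipfile entries only count as equal when their markers evaluate the same way on the current interpreter.

```python
from pipenv.patched.notpip._vendor.packaging.markers import Marker

print(Marker("python_version >= '3.6'").evaluate())  # True on any modern CPython
print(Marker("sys_platform == 'win32'").evaluate())  # platform-dependent
```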
@@ -129,10 +129,7 @@ def contains_key_in_pipfile(self, key):
             self._get_pipfile_section(develop=True, insert=False),
         ]
         return any(
-            (
-                packaging.utils.canonicalize_name(name)
-                == packaging.utils.canonicalize_name(key)
-            )
+            (canonicalize_name(name) == canonicalize_name(key))
             for section in sections
             for name in section
         )
@@ -155,7 +152,7 @@ def add_line_to_pipfile(self, line, develop):
         section[key] = entry
 
     def remove_keys_from_pipfile(self, keys, default, develop):
-        keys = {packaging.utils.canonicalize_name(key) for key in keys}
+        keys = {canonicalize_name(key) for key in keys}
         sections = []
         if default:
             sections.append(self._get_pipfile_section(develop=False, insert=False))
@@ -164,13 +161,13 @@ def remove_keys_from_pipfile(self, keys, default, develop):
         for section in sections:
             removals = set()
             for name in section:
-                if packaging.utils.canonicalize_name(name) in keys:
+                if canonicalize_name(name) in keys:
                     removals.add(name)
             for key in removals:
                 del section._data[key]
 
     def remove_keys_from_lockfile(self, keys):
-        keys = {packaging.utils.canonicalize_name(key) for key in keys}
+        keys = {canonicalize_name(key) for key in keys}
         removed = False
         for section_name in ("default", "develop"):
             try:
@@ -179,7 +176,7 @@ def remove_keys_from_lockfile(self, keys):
                 continue
             removals = set()
             for name in section:
-                if packaging.utils.canonicalize_name(name) in keys:
+                if canonicalize_name(name) in keys:
                     removals.add(name)
             removed = removed or bool(removals)
             for key in removals:
diff --git a/pipenv/vendor/requirementslib/models/requirements.py b/pipenv/vendor/requirementslib/models/requirements.py
index 33db00da92..fc572476a4 100644
--- a/pipenv/vendor/requirementslib/models/requirements.py
+++ b/pipenv/vendor/requirementslib/models/requirements.py
@@ -16,15 +16,15 @@
 import pipenv.vendor.attr as attr
 import pipenv.vendor.pip_shims as pip_shims
 from pipenv.vendor.cached_property import cached_property
-from pipenv.vendor.packaging.markers import Marker
-from pipenv.vendor.packaging.requirements import Requirement as PackagingRequirement
-from pipenv.vendor.packaging.specifiers import (
+from pipenv.patched.notpip._vendor.packaging.markers import Marker
+from pipenv.patched.notpip._vendor.packaging.requirements import Requirement as PackagingRequirement
+from pipenv.patched.notpip._vendor.packaging.specifiers import (
     InvalidSpecifier,
     LegacySpecifier,
     Specifier,
     SpecifierSet,
 )
-from pipenv.vendor.packaging.utils import canonicalize_name
+from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
 from pipenv.vendor.vistir.contextmanagers import temp_path
 from pipenv.vendor.vistir.misc import dedup
 from pipenv.vendor.vistir.path import (
diff --git a/pipenv/vendor/requirementslib/models/setup_info.py b/pipenv/vendor/requirementslib/models/setup_info.py
index 2a3e969cf8..4acd6d6a45 100644
--- a/pipenv/vendor/requirementslib/models/setup_info.py
+++ b/pipenv/vendor/requirementslib/models/setup_info.py
@@ -17,13 +17,12 @@
 from weakref import finalize
 
 import pipenv.vendor.attr as attr
-import packaging.specifiers
-import packaging.utils
-import packaging.version
 import pep517.envbuild
 import pep517.wrappers
 from pipenv.vendor.distlib.wheel import Wheel
-from pipenv.vendor.packaging.markers import Marker
+from pipenv.patched.notpip._vendor.packaging.markers import Marker
+from pipenv.patched.notpip._vendor.packaging.specifiers import SpecifierSet
+from pipenv.patched.notpip._vendor.packaging.version import parse
 from pipenv.vendor.pip_shims.utils import call_function_with_correct_args
 from pipenv.vendor.platformdirs import user_cache_dir
 from pipenv.vendor.vistir.contextmanagers import cd, temp_path
@@ -72,7 +71,7 @@
     )
 
     import pipenv.patched.notpip._vendor.requests as requests
-    from pipenv.vendor.packaging.requirements import Requirement as PackagingRequirement
+    from pipenv.patched.notpip._vendor.packaging.requirements import Requirement as PackagingRequirement
     from pipenv.vendor.pip_shims.shims import InstallRequirement, PackageFinder
     from pkg_resources import DistInfoDistribution, EggInfoDistribution, PathMetadata
     from pkg_resources import Requirement as PkgResourcesRequirement
@@ -493,15 +492,17 @@ def _find_variable_in_body(body: "Iterable[Any]", name: str) -> "Optional[Any]":
             if not isinstance(elem, (ast.Assign, ast.AnnAssign)):
                 continue
 
-            if getattr(elem, "target", None) and elem.target.id == name:
-                return elem.value
-
-            for target in elem.targets:
-                if not isinstance(target, ast.Name):
+            if isinstance(elem, ast.AnnAssign):
+                if not isinstance(elem.target, ast.Name):
                     continue
-
-                if target.id == name:
+                if elem.value and elem.target.id == name:
                     return elem.value
+            else:
+                for target in elem.targets:
+                    if not isinstance(target, ast.Name):
+                        continue
+                    if target.id == name:
+                        return elem.value
         return None
 
     @staticmethod
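This hunk fixes a real bug: `ast.AnnAssign` nodes have a single `.target` and no `.targets`, so the old fall-through `for target in elem.targets:` raised `AttributeError` on annotated assignments. A standalone sketch of the distinction:

```python
import ast

tree = ast.parse("version: str = '1.0'\nname = 'demo'")
for node in tree.body:
    if isinstance(node, ast.AnnAssign):
        # Annotated assignment: one .target; .value may be None ("x: int")
        print("annotated:", node.target.id, "=", node.value.value)
    elif isinstance(node, ast.Assign):
        # Plain assignment: a list of .targets
        print("plain:", [t.id for t in node.targets])
# annotated: version = 1.0
# plain: ['demo']
```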
@@ -1015,9 +1016,7 @@ class SetupInfo(object):
     build_requires = attr.ib(default=None, eq=True)  # type: Optional[Tuple]
     build_backend = attr.ib(eq=True)  # type: STRING_TYPE
     setup_requires = attr.ib(default=None, eq=True)  # type: Optional[Tuple]
-    python_requires = attr.ib(
-        default=None, eq=True
-    )  # type: Optional[packaging.specifiers.SpecifierSet]
+    python_requires = attr.ib(default=None, eq=True)  # type: Optional[SpecifierSet]
     _extras_requirements = attr.ib(default=None, eq=True)  # type: Optional[Tuple]
     setup_cfg = attr.ib(type=Path, default=None, eq=True, hash=False)
     setup_py = attr.ib(type=Path, default=None, eq=True, hash=False)
@@ -1096,7 +1095,7 @@ def update_from_dict(self, metadata):
         version = metadata.get("version", None)
         if version:
             try:
-                packaging.version.parse(version)
+                parse(version)
             except TypeError:
                 version = self.version if self.version else None
             else:
diff --git a/pipenv/vendor/requirementslib/models/utils.py b/pipenv/vendor/requirementslib/models/utils.py
index 3939c5c588..0f2f40d036 100644
--- a/pipenv/vendor/requirementslib/models/utils.py
+++ b/pipenv/vendor/requirementslib/models/utils.py
@@ -13,9 +13,9 @@
 
 import pipenv.vendor.tomlkit as tomlkit
 from pipenv.vendor.attr import validators
-from pipenv.vendor.packaging.markers import InvalidMarker, Marker, Op, Value, Variable
-from pipenv.vendor.packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet
-from pipenv.vendor.packaging.version import parse as parse_version
+from pipenv.patched.notpip._vendor.packaging.markers import InvalidMarker, Marker, Op, Value, Variable
+from pipenv.patched.notpip._vendor.packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet
+from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version
 from pipenv.vendor.plette.models import Package, PackageCollection
 from pipenv.vendor.tomlkit.container import Container
 from pipenv.vendor.tomlkit.items import AoT, Array, Bool, InlineTable, Item, String, Table
@@ -45,11 +45,11 @@
     )
 
     from pipenv.vendor.attr import _ValidatorType  # noqa
-    from pipenv.vendor.packaging.markers import Marker as PkgResourcesMarker
-    from pipenv.vendor.packaging.markers import Op as PkgResourcesOp
-    from pipenv.vendor.packaging.markers import Value as PkgResourcesValue
-    from pipenv.vendor.packaging.markers import Variable as PkgResourcesVariable
-    from pipenv.vendor.packaging.requirements import Requirement as PackagingRequirement
+    from pipenv.patched.notpip._vendor.packaging.markers import Marker as PkgResourcesMarker
+    from pipenv.patched.notpip._vendor.packaging.markers import Op as PkgResourcesOp
+    from pipenv.patched.notpip._vendor.packaging.markers import Value as PkgResourcesValue
+    from pipenv.patched.notpip._vendor.packaging.markers import Variable as PkgResourcesVariable
+    from pipenv.patched.notpip._vendor.packaging.requirements import Requirement as PackagingRequirement
     from pipenv.vendor.pip_shims.shims import Link
     from pkg_resources import Requirement as PkgResourcesRequirement
     from pipenv.vendor.urllib3.util.url import Url
@@ -912,7 +912,7 @@ def clean_requires_python(candidates):
     `requires_python` attributes."""
     all_candidates = []
     sys_version = ".".join(map(str, sys.version_info[:3]))
-    from pipenv.vendor.packaging.version import parse as parse_version
+    from pipenv.patched.notpip._vendor.packaging.version import parse as parse_version
 
     py_version = parse_version(os.environ.get("PIP_PYTHON_VERSION", sys_version))
     for c in candidates:
@@ -936,7 +936,7 @@ def clean_requires_python(candidates):
 
 
 def fix_requires_python_marker(requires_python):
-    from pipenv.vendor.packaging.requirements import Requirement as PackagingRequirement
+    from pipenv.patched.notpip._vendor.packaging.requirements import Requirement as PackagingRequirement
 
     marker_str = ""
     if any(requires_python.startswith(op) for op in Specifier._operators.keys()):
@@ -986,7 +986,7 @@ def get_name_variants(pkg):
 
     if not isinstance(pkg, str):
         raise TypeError("must provide a string to derive package names")
-    from pipenv.vendor.packaging.utils import canonicalize_name
+    from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name
     from pkg_resources import safe_name
 
     pkg = pkg.lower()
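`canonicalize_name` here is the PEP 503 normalization (runs of `-`, `_`, `.` collapse to `-`, lowercased), which is why it is pulled in for deriving name variants:

```python
from pipenv.patched.notpip._vendor.packaging.utils import canonicalize_name

for name in ("Django", "zope.interface", "ruamel_yaml"):
    print(canonicalize_name(name))
# django
# zope-interface
# ruamel-yaml
```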
diff --git a/pipenv/vendor/requirementslib/utils.py b/pipenv/vendor/requirementslib/utils.py
index 9048a8efb5..11c11cf297 100644
--- a/pipenv/vendor/requirementslib/utils.py
+++ b/pipenv/vendor/requirementslib/utils.py
@@ -171,7 +171,7 @@ def convert_entry_to_path(path):
 def is_installable_file(path):
     # type: (PipfileType) -> bool
     """Determine if a path can potentially be installed."""
-    from pipenv.vendor.packaging import specifiers
+    from pipenv.patched.notpip._vendor.packaging import specifiers
 
     if isinstance(path, Mapping):
         path = convert_entry_to_path(path)
@@ -645,7 +645,7 @@ def remerge_exit(path, key, old_parent, new_parent, new_items):
         if sourced:
 
             def remerge_visit(path, key, value):
-                source_map[path + (key,)] = t_name
+                source_map[path + (key,)] = t_name  # noqa: B023
                 return True
 
         else:
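The added `# noqa: B023` suppresses flake8-bugbear's late-binding warning rather than fixing one: `remerge_visit` is redefined for each `t_name`, so the capture is safe here. The pitfall B023 guards against, in isolation:

```python
# Closures capture the variable, not its value at definition time:
callbacks = [lambda: t_name for t_name in ("default", "develop")]
print([cb() for cb in callbacks])  # ['develop', 'develop']

# Binding via a default argument freezes the value per iteration:
callbacks = [lambda t_name=t_name: t_name for t_name in ("default", "develop")]
print([cb() for cb in callbacks])  # ['default', 'develop']
```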
diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt
index 432dd5c4f5..174b1cfc84 100644
--- a/pipenv/vendor/vendor.txt
+++ b/pipenv/vendor/vendor.txt
@@ -13,11 +13,10 @@ idna==3.2
 iso8601==0.1.16
 markupsafe==2.0.1
 orderedmultidict==1.0.1
-packaging==21.3
 parse==1.19.0
 pep517==0.11.0
 pexpect==4.8.0
-pip-shims==0.7.2
+pip-shims==0.7.3
 pipdeptree==2.2.1
 platformdirs==2.4.0
 plette[validation]==0.2.3
@@ -26,7 +25,7 @@ pyparsing==3.0.7
 python-dateutil==2.8.2
 python-dotenv==0.19.0
 pythonfinder==1.2.10
-requirementslib==1.6.6
+requirementslib==1.6.7
 shellingham==1.4.0
 six==1.16.0
 termcolor==1.1.0
diff --git a/pipenv/vendor/wheel/vendored/packaging/_typing.py b/pipenv/vendor/wheel/vendored/packaging/_typing.py
index 480a196c71..0d62b82872 100644
--- a/pipenv/vendor/wheel/vendored/packaging/_typing.py
+++ b/pipenv/vendor/wheel/vendored/packaging/_typing.py
@@ -18,7 +18,7 @@
 
 In packaging, all static-typing related imports should be guarded as follows:
 
-    from pipenv.vendor.packaging._typing import TYPE_CHECKING
+    from pipenv.patched.notpip._vendor.packaging._typing import TYPE_CHECKING
 
     if TYPE_CHECKING:
         from typing import ...
diff --git a/tasks/vendoring/__init__.py b/tasks/vendoring/__init__.py
index 063b2bf671..893cd9b30a 100644
--- a/tasks/vendoring/__init__.py
+++ b/tasks/vendoring/__init__.py
@@ -69,6 +69,7 @@
     "pip": "pipenv.patched.notpip",
     "functools32": "pipenv.vendor.backports.functools_lru_cache",
     "requests": "pipenv.patched.notpip._vendor.requests",
+    "packaging": "pipenv.patched.notpip._vendor.packaging",
 }
 
 GLOBAL_REPLACEMENT = [
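The new `LIBRARY_RENAMES` entry is what drives every `packaging` import rewrite in this patch. A minimal sketch of the idea (not the actual vendoring task code), assuming a line-by-line rewrite of `from ...` imports:

```python
import re

LIBRARY_RENAMES = {"packaging": "pipenv.patched.notpip._vendor.packaging"}

def rewrite(line):
    # Rewrite "from packaging..." imports to the vendored location.
    for lib, target in LIBRARY_RENAMES.items():
        line = re.sub(rf"^(\s*from\s+){lib}(?=[.\s])", rf"\g<1>{target}", line)
    return line

print(rewrite("from packaging.version import parse"))
# from pipenv.patched.notpip._vendor.packaging.version import parse
```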